Merge branch 'socprime-master'

This commit is contained in:
Thomas Patzke
2020-05-24 22:58:58 +02:00
34 changed files with 3718 additions and 839 deletions
+2
@@ -55,6 +55,8 @@ test-sigmac:
$(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t qualys -c tools/config/qualys.yml rules/ > /dev/null
$(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t netwitness -c tools/config/netwitness.yml rules/ > /dev/null
$(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t sumologic -O rulecomment -c tools/config/sumologic.yml rules/ > /dev/null
$(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t humio -O rulecomment -c tools/config/humio.yml rules/ > /dev/null
$(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t crowdstrike -O rulecomment -c tools/config/crowdstrike.yml rules/ > /dev/null
$(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t sql -c sysmon rules/ > /dev/null
$(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t logiq -c sysmon rules/ > /dev/null
$(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t splunk -c tools/config/splunk-windows-index.yml -f 'level>=high,level<=critical,status=stable,logsource=windows,tag=attack.execution' rules/ > /dev/null
+101
@@ -0,0 +1,101 @@
title: Azure Sentinel
order: 20
backends:
- ala
- ala-rule
fieldmappings:
ComputerName: Computer
Event-ID: EventID
Event_ID: EventID
eventId: EventID
event_id: EventID
event-id: EventID
eventid: EventID
hashes: Hashes
file_hash: Hashes
url.query: URL
resource.URL: URL
src_ip: SourceIp
source.ip: SourceIp
FileName: TargetFilename
dst_ip: DestinationIP
destination.ip: DestinationIP
event_data.AccessMask: AccessMask
event_data.AllowedToDelegateTo: AllowedToDelegateTo
event_data.AttributeLDAPDisplayName: AttributeLDAPDisplayName
event_data.AuditPolicyChanges: AuditPolicyChanges
event_data.AuthenticationPackageName: AuthenticationPackageName
event_data.CallingProcessName: CallingProcessName
event_data.CallTrace": CallTrace
event_data.CommandLine: CommandLine
Commandline: CommandLine
cmd: CommandLine
event_data.ComputerName: ComputerName
event_data.CurrentDirectory: CurrentDirectory
event_data.Description: Description
event_data.DestinationHostname: DestinationHostname
event_data.DestinationIp: DestinationIp
event_data.DestinationPort: DestinationPort
event_data.Details: Details
event_data.EngineVersion: EngineVersion
event_data.EventType: EventType
event_data.FailureCode: FailureCode
event_data.FileName: FileName
event_data.GrantedAccess: GrantedAccess
event_data.GroupName: GroupName
event_data.GroupSid: GroupSid
event_data.Hashes: Hashes
event_data.HiveName: HiveName
event_data.HostVersion: HostVersion
Image:
service=security: Process
category=process_creation: NewProcessName
default: Image
event_data.Image:
service=security: Process
category=process_creation: NewProcessName
default: Image
event_data.ImageLoaded": ImageLoaded
event_data.ImagePath: ImagePath
event_data.Imphash: Imphash
event_data.IpAddress: IpAddress
event_data.KeyLength: KeyLength
event_data.LogonProcessName: LogonProcessName
event_data.LogonType: LogonType
event_data.NewProcessName: NewProcessName
event_data.ObjectClass: ObjectClass
event_data.ObjectName: ObjectName
event_data.ObjectType: ObjectType
event_data.ObjectValueName: ObjectValueName
event_data.ParentCommandLine: ParentCommandLine
event_data.ParentImage:
category=process_creation: ParentProcessName
default: ParentImage
ParentImage:
category=process_creation: ParentProcessName
default: ParentImage
event_data.ParentProcessName: ParentProcessName
event_data.Path: Path
event_data.PipeName: PipeName
event_data.ProcessCommandLine: CommandLine
event_data.ProcessName: ProcessName
event_data.Properties: Properties
event_data.SecurityID: SecurityID
event_data.ServiceFileName: ServiceFileName
event_data.ServiceName: ServiceName
event_data.ShareName: ShareName
event_data.Signature: Signature
event_data.Source: Source
event_data.SourceImage: SourceImage
event_data.StartModule: StartModule
event_data.Status: Status
event_data.SubjectUserName: SubjectUserName
event_data.SubjectUserSid: SubjectUserSid
event_data.TargetFilename: TargetFilename
event_data.TargetImage: TargetImage
event_data.TargetObject: TargetObject
event_data.TicketEncryptionType: TicketEncryptionType
event_data.TicketOptions: TicketOptions
event_data.User: User
event_data.WorkstationName: WorkstationName
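A minimal sketch of how the conditional Image mapping above behaves, using a hypothetical rule; sigmac selects the mapping branch from the rule's logsource:
title: Example - suspicious image path (hypothetical, for illustration)
logsource:
    product: windows
    category: process_creation
detection:
    selection:
        Image: 'C:\Windows\Temp\evil.exe'
    condition: selection
# category=process_creation selects the NewProcessName branch of the
# Image mapping above; service=security would select Process, and any
# other logsource falls through to the default branch, Image.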
+116 -3
@@ -498,7 +498,7 @@ fieldmappings:
#service=socks:
status_msg:
- 'message'
#subject:
subject:
- 'message'
#service=known_certs:
#service=sip:
@@ -967,7 +967,7 @@ fieldmappings:
auth_success: name
cipher_alg: message
#client: deviceCustomString5
compression_alg:
cshka: message
direction: deviceDirection
hassh: message
@@ -1054,4 +1054,117 @@ fieldmappings:
id_orig_h: sourceAddress
id_orig_p: sourcePort
id_resp_h: destinationAddress
id_resp_p: destinationPort
# Temporary one off rule name fields
cs-uri: requestUrl
destination.domain:
destination.ip: destinationAddress
destination.port: destinationPort
http.response.status_code: deviceSeverity
#http.request.body.content
source.domain:
#sourceAddress: #TONOTE: is arcsight
source.port: sourcePort
agent.version: deviceCustomString2
c-ip: sourceAddress
clientip: sourceAddress
clientIP: sourceAddress
dest_domain:
- url.domain
dest_ip: destinationAddress
dest_port: destinationPort
#TODO:WhatShouldThisBe?==dest:
#TODO:WhatShouldThisBe?==destination:
#TODO:WhatShouldThisBe?==Destination:
destination.hostname: destinationHostName
#DestinationAddress: #TONOTE: is arcsight
#DestinationHostname: #TONOTE: is arcsight
DestinationIp: destinationAddress
DestinationIP: destinationAddress
DestinationPort: destinationPort
dst-ip: destinationAddress
dstip: destinationAddress
dstport: destinationPort
Host: requestHost
#host:
HostVersion: deviceCustomString2
http_host: destinationHostName
http_uri: requestUrl
http_url: requestUrl
http_user_agent:
- deviceCustomString5
- requestClientApplication
http.request.url-query-params:
- requestUrl
- requestUrlQuery
HttpMethod: requestMethod
in_url: requestUrl
#parent_domain:
# - url.registered_domain
# - destination.registered_domain
post_url_parameter: requestUrl
Request Url: requestUrl
request_url: requestUrl
request_URL: requestUrl
RequestUrl: requestUrl
#response: http.response.status_code
resource.url: requestUrl
resource.URL: requestUrl
sc_status: deviceSeverity
sender_domain: message
service.response_code: deviceSeverity
SourceAddr: sourceAddress
SourceAddress: sourceAddress
SourceIP: sourceAddress
SourceIp: sourceAddress
SourceNetworkAddress:
- source.address
- sourceAddress
SourcePort: sourcePort
srcip: sourceAddress
Status: deviceSeverity
#status: deviceSeverity
url: requestUrl
URL: requestUrl
url_query:
- requestUrl
- requestUrlQuery
url.query:
- requestUrl
- requestUrlQuery
uri_path: requestUrl
#user_agent: user_agent.original
user_agent.name:
- deviceCustomString5
- requestClientApplication
user-agent:
- deviceCustomString5
- requestClientApplication
User-Agent:
- deviceCustomString5
- requestClientApplication
useragent:
- deviceCustomString5
- requestClientApplication
UserAgent:
- deviceCustomString5
- requestClientApplication
User Agent:
- deviceCustomString5
- requestClientApplication
web_dest: destinationHostName
web.dest: destinationHostName
Web.dest: destinationHostName
web.host: destinationHostName
Web.host: destinationHostName
web_method: requestMethod
Web_method: requestMethod
web.method: requestMethod
Web.method: requestMethod
web_src: sourceAddress
web_status: deviceSeverity
Web_status: deviceSeverity
web.status: deviceSeverity
Web.status: deviceSeverity
web_uri: requestUrl
web_url: requestUrl
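A minimal sketch of the one-to-many mappings above, using a hypothetical rule; a Sigma field that maps to a list is expanded into an OR across the listed CEF fields:
title: Example - scripted user agent (hypothetical, for illustration)
logsource:
    category: proxy
detection:
    selection:
        http_user_agent: 'curl*'
    condition: selection
# http_user_agent maps to a list above, so the generated CEF query is
# roughly: deviceCustomString5='curl*' OR requestClientApplication='curl*'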
+129 -1
@@ -349,4 +349,132 @@ fieldmappings:
keywords:
- deviceCustomString1
ScriptBlockText:
- deviceCustomString1
AccessMask: deviceCustomString1
AccountName: deviceCustomString1
AllowedToDelegateTo: deviceCustomString1
AttributeLDAPDisplayName: deviceCustomString1
AuditPolicyChanges: deviceCustomString1
AuthenticationPackageName: deviceCustomString1
CallingProcessName: deviceCustomString1
Command: deviceCustomString1
Command_Line: deviceCustomString1
ComputerName: deviceCustomString1
destination.domain: deviceCustomString1
DestinationIP: deviceCustomString1
EngineVersion: deviceCustomString1
Event: deviceCustomString1
event.category: deviceCustomString1
event.raw: deviceCustomString1
event_data.AccessMask: deviceCustomString1
event_data.AccountName: deviceCustomString1
event_data.AllowedToDelegateTo: deviceCustomString1
event_data.AttributeLDAPDisplayName: deviceCustomString1
event_data.AuditPolicyChanges: deviceCustomString1
event_data.AuthenticationPackageName: deviceCustomString1
event_data.CallingProcessName: deviceCustomString1
event_data.CallTrace: deviceCustomString1
event_data.CommandLine: deviceCustomString1
event_data.ComputerName: deviceCustomString1
event_data.CurrentDirectory: deviceCustomString1
event_data.Description: deviceCustomString1
event_data.DestinationHostname: deviceCustomString1
event_data.DestinationIp: deviceCustomString1
event_data.DestinationIsIpv6: deviceCustomString1
event_data.DestinationPort: deviceCustomString1
event_data.Details: deviceCustomString1
event_data.EngineVersion: deviceCustomString1
event_data.EventType: deviceCustomString1
event_data.FailureCode: deviceCustomString1
event_data.FileName: deviceCustomString1
event_data.GrantedAccess: deviceCustomString1
event_data.GroupName: deviceCustomString1
event_data.GroupSid: deviceCustomString1
event_data.Hashes: deviceCustomString1
event_data.HiveName: deviceCustomString1
event_data.HostVersion: deviceCustomString1
event_data.Image: deviceCustomString1
event_data.ImageLoaded: deviceCustomString1
event_data.ImagePath: deviceCustomString1
event_data.Imphash: deviceCustomString1
event_data.IpAddress: deviceCustomString1
event_data.KeyLength: deviceCustomString1
event_data.LogonProcessName: deviceCustomString1
event_data.LogonType: deviceCustomString1
event_data.NewProcessName: deviceCustomString1
event_data.ObjectClass: deviceCustomString1
event_data.ObjectName: deviceCustomString1
event_data.ObjectType: deviceCustomString1
event_data.ObjectValueName: deviceCustomString1
event_data.ParentCommandLine: deviceCustomString1
event_data.ParentImage: deviceCustomString1
event_data.ParentProcessName: deviceCustomString1
event_data.Path: deviceCustomString1
event_data.PipeName: deviceCustomString1
event_data.ProcessCommandLine: deviceCustomString1
event_data.ProcessName: deviceCustomString1
event_data.Properties: deviceCustomString1
event_data.SecurityID: deviceCustomString1
event_data.ServiceFileName: deviceCustomString1
event_data.ServiceName: deviceCustomString1
event_data.ShareName: deviceCustomString1
event_data.Signature: deviceCustomString1
event_data.Source: deviceCustomString1
event_data.SourceImage: deviceCustomString1
event_data.StartModule: deviceCustomString1
event_data.Status: deviceCustomString1
event_data.SubjectUserName: deviceCustomString1
event_data.SubjectUserSid: deviceCustomString1
event_data.TargetFilename: deviceCustomString1
event_data.TargetImage: deviceCustomString1
event_data.TargetObject: deviceCustomString1
event_data.TicketEncryptionType: deviceCustomString1
event_data.TicketOptions: deviceCustomString1
event_data.User: deviceCustomString1
event_data.WorkstationName: deviceCustomString1
FailureCode: deviceCustomString1
GroupName: deviceCustomString1
GroupSid: deviceCustomString1
hashes: deviceCustomString1
Header.Accept: deviceCustomString1
HiveName: deviceCustomString1
host.scan.vuln_name: deviceCustomString1
HostVersion: deviceCustomString1
ImagePath: deviceCustomString1
Imphash: deviceCustomString1
IpAddress: deviceCustomString1
IpPort: deviceCustomString1
KeyLength: deviceCustomString1
log_name: deviceCustomString1
LogonType: deviceCustomString1
NewProcessName: deviceCustomString1
ObjectClass: deviceCustomString1
ObjectName: deviceCustomString1
ObjectType: deviceCustomString1
ObjectValueName: deviceCustomString1
ParentProcessName: deviceCustomString1
Path: deviceCustomString1
ProcessCommandLine: deviceCustomString1
ProcessName: deviceCustomString1
Properties: deviceCustomString1
resource.URL: deviceCustomString1
SecurityEvent: deviceCustomString1
SecurityID: deviceCustomString1
SelectionURL: deviceCustomString1
ServiceFileName: deviceCustomString1
ServiceName: deviceCustomString1
ShareName: deviceCustomString1
Source: deviceCustomString1
source_name: deviceCustomString1
SourceIP: deviceCustomString1
Status: deviceCustomString1
SubjectDomainName: deviceCustomString1
SubjectUserName: deviceCustomString1
SubjectUserSid: deviceCustomString1
SysmonEvent: deviceCustomString1
TargetDomainName: deviceCustomString1
TargetUserSid: deviceCustomString1
TicketEncryptionType: deviceCustomString1
TicketOptions: deviceCustomString1
winlog.channel: deviceCustomString1
WorkstationName: deviceCustomString1
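A minimal sketch of what this catch-all mapping implies, using a hypothetical rule; most Windows event fields in this file funnel into the single deviceCustomString1 CEF field:
title: Example - NewCredentials logon (hypothetical, for illustration)
logsource:
    product: windows
    service: security
detection:
    selection:
        LogonType: 9
    condition: selection
# LogonType, like most Windows event fields above, is mapped to
# deviceCustomString1, so the query matches that one CEF custom string
# rather than a dedicated column.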
+19
@@ -0,0 +1,19 @@
title: CrowdStrike log source conditions
order: 20
backends:
- crowdstrike
logsources:
windows-sysmon:
product: windows
service: sysmon
conditions:
EventID: 1
process_creation_1:
category: process_creation
product: windows
fieldmappings:
EventID: EventID
CommandLine: Commandline
Command_Line: Commandline
Image: ImageFileName
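A minimal sketch of how these logsource conditions and field mappings combine, using a hypothetical rule:
title: Example - PowerShell execution (hypothetical, for illustration)
logsource:
    product: windows
    category: process_creation
detection:
    selection:
        Image: '*\powershell.exe'
    condition: selection
# process_creation_1 matches this rule and the fieldmappings rewrite
# Image to ImageFileName; a product=windows/service=sysmon rule would
# additionally get EventID: 1 ANDed in via the windows-sysmon entry.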
+69
@@ -0,0 +1,69 @@
title: Elastic Common Schema mapping for proxy and webserver logs including NSM DNS logs (zeek/suricata)
order: 20
backends:
- es-qs
- es-dsl
- elasticsearch-rule
- kibana
- xpack-watcher
- elastalert
- elastalert-dsl
# zeek-category-dns:
# category: dns
# conditions:
# event.dataset: dns
# zeek-dns:
# product: zeek
# service: dns
# conditions:
# event.dataset: dns
defaultindex:
- filebeat-*
# logsourcemerging: or
fieldmappings:
# All Logs Applied Mapping & Taxonomy
dst:
- destination.address
- destination.ip
dst_ip:
- destination.address
- destination.ip
dst_port: destination.port
src:
- source.address
- source.ip
src_ip:
- source.address
- source.ip
src_port: source.port
# DNS Taxonomy
answer: dns.answers.name
c-dns: dns.question.name
parent_domain: dns.question.registered_domain
query: dns.question.name
QueryName: dns.question.name
r-dns: dns.question.name
record_type: dns.answers.type
response: dns.answers
#question_length:
# Zeek DNS specific
AA: dns.AA
addl: dns.addl
answers: dns.answers.name
auth: dns.auth
qclass_name: dns.question.class
qclass: dns.qclass
qtype_name: dns.question.type
qtype: dns.qtype
query: dns.question.name
#question_length: labels.dns.query_length
RA: dns.RA
rcode_name: dns.response_code
rcode: dns.rcode
RD: dns.RD
rejected: dns.rejected
rtt: dns.rtt
TC: dns.TC
trans_id: dns.id
TTLs: dns.answers.ttl
Z: dns.Z
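A minimal sketch of the DNS taxonomy mapping above, using a hypothetical rule and the filebeat-* defaultindex:
title: Example - suspicious lookup (hypothetical, for illustration)
logsource:
    category: dns
detection:
    selection:
        query: '*.bad-domain.example'
    condition: selection
# query maps to dns.question.name and the search runs against the
# filebeat-* defaultindex; with the es-qs backend this is roughly
# dns.question.name:*.bad-domain.example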
+201 -9
@@ -1,30 +1,222 @@
title: Elastic Common Schema mapping for proxy logs
title: Elastic Common Schema mapping for proxy and webserver logs including NSM logs (zeek/suricata)
order: 20
backends:
- es-qs
- es-dsl
- es-rule
- corelight_elasticsearch-rule
- kibana
- xpack-watcher
- elastalert
- elastalert-dsl
- ee-outliers
logsources:
proxy:
category: proxy
index: filebeat-*
defaultindex:
- filebeat-*
fieldmappings:
c-uri: url.original
# All Logs Applied Mapping & Taxonomy
dst:
- destination.address
- destination.ip
dst_ip:
- destination.address
- destination.ip
dst_port: destination.port
src:
- source.address
- source.ip
src_ip:
- source.address
- source.ip
src_port: source.port
# Web/Proxy Taxonomy
cs-bytes: http.request.body.bytes
cs-cookie-vars: http.cookie_vars
c-uri-extension: url.extension
c-uri-query: url.query
c-uri-stem: url.original
c-uri: url.original
c-useragent: user_agent.original
cs-bytes: http.request.body.bytes
cs-cookie: http.cookie
cs-host: url.domain
cs-host:
- url.domain
- destination.domain
cs-method: http.request.method
cs-referrer: http.request.referrer
cs-version: http.version
r-dns: url.domain
sc-status: http.response.status_code
r-dns:
- destination.domain
- url.domain
sc-bytes: http.response.body.bytes
sc-status: http.response.status_code
# Temporary one off rule name fields
destination.domain:
# destination.ip:
# destination.port:
# http.response.status_code
# http.request.body.content
# source.domain:
# source.ip:
# source.port:
agent.version: http.version
c-ip:
- source.address
- source.ip
clientip:
- source.address
- source.ip
clientIP:
- source.address
- source.ip
dest_domain:
- destination.domain
- url.domain
dest_ip:
- destination.address
- destination.ip
dest_port: destination.port
#TODO:WhatShouldThisBe?==dest:
#TODO:WhatShouldThisBe?==destination:
#TODO:WhatShouldThisBe?==Destination:
destination.hostname:
- destination.domain
- url.domain
DestinationAddress:
DestinationHostname:
- destination.domain
- url.domain
DestinationIp:
- destination.address
- destination.ip
DestinationIP:
- destination.address
- destination.ip
DestinationPort: destination.port
dst-ip:
- destination.address
- destination.ip
dstip:
- destination.address
- destination.ip
dstport: destination.port
Host:
- destination.domain
- url.domain
host:
- destination.domain
- url.domain
HostVersion: http.version
http_host:
- destination.domain
- url.domain
http_uri: url.original
http_url: url.original
http_user_agent: user_agent.original
http.request.url-query-params: url.original
HttpMethod: http.request.method
in_url: url.original
parent_domain:
- url.registered_domain
- destination.registered_domain
post_url_parameter: url.original
Request Url: url.original
request_url: url.original
request_URL: url.original
RequestUrl: url.original
response: http.response.status_code
resource.url: url.original
resource.URL: url.original
sc_status: http.response.status_code
sender_domain:
- destination.domain
- url.domain
service.response_code: http.response.status_code
source:
- source.address
- source.ip
SourceAddr:
- source.address
- source.ip
SourceAddress:
- source.address
- source.ip
SourceIP:
- source.address
- source.ip
SourceIp:
- source.address
- source.ip
SourceNetworkAddress:
- source.address
- source.ip
SourcePort: source.port
srcip:
- source.address
- source.ip
Status: http.response.status_code
status: http.response.status_code
url: url.original
URL: url.original
url_query: url.original
url.query: url.original
uri_path: url.original
user_agent: user_agent.original
user_agent.name: user_agent.original
user-agent: user_agent.original
User-Agent: user_agent.original
useragent: user_agent.original
UserAgent: user_agent.original
web_dest:
- url.domain
- destination.domain
web.dest:
- url.domain
- destination.domain
Web.dest:
- url.domain
- destination.domain
web.host:
- url.domain
- destination.domain
Web.host:
- url.domain
- destination.domain
web_method: http.request.method
Web_method: http.request.method
web.method: http.request.method
Web.method: http.request.method
web_src:
- source.address
- source.ip
web_status: http.response.status_code
Web_status: http.response.status_code
web.status: http.response.status_code
Web.status: http.response.status_code
web_uri: url.original
web_url: url.original
# Zeek HTTP as Proxy/Web
client_header_names: http.client_header_names
cookie_vars: http.cookie_vars
flash_version: http.flash_version
info_code: http.info_code
info_msg: http.info_msg
method: http.request.method
omniture: http.omniture
orig_filenames: http.orig_filenames
orig_mime_types: http.orig_mime_types
origin: http.origin
#password: source.user.password
post_body: http.post_body
proxied: http.proxied
referrer: http.request.referrer
request_body_len: http.request.body.bytes
resp_filenames: http.resp_filenames
resp_mime_types: http.resp_mime_types
response_body_len: http.response.body.bytes
server_header_names: http.server_header_names
status_code: http.response.status_code
status_msg: http.status_msg
trans_depth: http.trans_depth
uri_vars: http.uri_vars
username: source.user.name
version: http.version
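A minimal sketch of the alias funneling above, using a hypothetical rule; the legacy URL aliases all compile to the same ECS field:
title: Example - login page probe (hypothetical, for illustration)
logsource:
    category: proxy
detection:
    selection1:
        c-uri: '*/wp-login.php'
    selection2:
        url: '*/wp-login.php'
    selection3:
        web_url: '*/wp-login.php'
    condition: 1 of selection*
# c-uri, url and web_url all map to url.original above, so all three
# selections compile to the same url.original wildcard match.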
+269 -44
@@ -35,16 +35,18 @@ logsources:
rewrite:
product: zeek
service: dns
conditions:
event.dataset: dns
zeek-category-proxy:
category: proxy
rewrite:
product: zeek
service: http
zeek-category-webserver:
category: webserver
rewrite:
product: zeek
service: http
zeek-conn:
product: zeek
service: conn
@@ -396,134 +398,250 @@ fieldmappings:
uid: log.id.uid
uids: log.id.uids
uuid: log.id.uuid
# Overlapping fields/mappings (aka: shared fields)
# Deep mappings / Overlapping fields/mappings (aka: shared fields)
#_action
action: '*.action'
action:
#- '*.action'
service=mqtt: mqtt.action
service=smb_files: smb.action
service=tunnel: tunnel.action
mqtt_action: mqtt.action
smb_action: smb.action
tunnel_action: tunnel.action
#_addl
addl: weird.addl
addl:
#- '*.addl'
service=dns: dns.addl
service=weird: weird.addl
dns_addl: dns.addl
weird_addl: weird.addl
#_analyzer
analyzer: '*.analyzer'
analyzer:
#- '*.analyzer'
service=dpd: dpd.analyzer
service=files: files.analyzer
dpd_analyzer: dpd.analyzer
files_analyzer: files.analyzer
#_arg
arg: '*.arg'
arg:
#- '*.arg'
service=ftp: ftp.arg
service=mysql: mysql.arg
service=pop3: pop3.arg
ftp_arg: ftp.arg
pop3_arg: pop3.arg
mysql_arg: mysql.arg
pop3_arg: pop3.arg
#_auth
#auth:
#service=rfb: rfb.auth #RFB does not exist in newer logs, so skipping to cover dns.auth
auth:
#- dns.auth
service=dns: dns.auth
service=rfb: rfb.auth
dns_auth: dns.auth
rfb_auth: rfb.auth
#_cipher
cipher: tls.cipher
cipher:
#- '*.cipher'
service=kerberos: kerberos.cipher
service=ssl: tls.cipher
kerberos_cipher: kerberos.cipher
ssl_cipher: tls.cipher
tls_cipher: tls.cipher
#_client
client: '*.client'
client:
#- '*.client'
service=kerberos: kerberos.client
service=ssh: ssh.client
kerberos_client: kerberos.client
ssh_client: ssh.client
#_command
command: '*.command'
command:
#- '*.command'
service=irc: irc.command
service=ftp: ftp.command
service=pop3: pop3.command
ftp_command: ftp.command
irc_command: irc.command
pop3_command: pop3.command
#_date
date: '*.date'
date:
#- '*.date'
service=sip: sip.date
service=smtp: smtp.date
sip_date: sip.date
smtp_date: smtp.date
#_duration
duration: event.duration
duration:
#- event.duration
service=conn: event.duration
service=files: files.duration
service=snmp: event.duration
conn_duration: event.duration
files_duration: files.duration
snmp_duration: event.duration
#_from
from: '*.from'
from:
#- '*.from'
service=kerberos: kerberos.from
service=smtp: smtp.from
kerberos_from: kerberos.from
smtp_from: smtp.from
#_is_orig
is_orig: '*.is_orig'
is_orig_file: file.is_orig
is_orig_pop3: pop3.is_orig
is_orig:
#- '*.is_orig'
service=file: file.is_orig
service=pop3: pop3.is_orig
files_is_orig: file.is_orig
pop3_is_orig: pop3.is_orig
#_local_orig
local_orig: '*.local_orig'
local_orig:
#- '*.local_orig'
service=conn: conn.local_orig
service=files: file.local_orig
conn_local_orig: conn.local_orig
files_local_orig: file.local_orig
#_method
method: http.request.method
method:
#- http.request.method
service=http: http.request.method
service=sip: sip.method
http_method: http.request.method
sip_method: sip.method
#_msg
msg: notice.msg
msg:
#- notice.msg
service=notice: notice.msg
service=pop3: pop3.msg
notice_msg: notice.msg
pop3_msg: pop3.msg
#_name
name: file.name
name:
#- file.name
service=smb_files: file.name
service=software: software.name
service=weird: weird.name
smb_files_name: file.name
software_name: software.name
weird_name: weird.name
#_path
path: file.path
path:
#- file.path
service=smb_files: file.path
service=smb_mapping: file.path
service=smtp: smtp.path
smb_files_path: file.path
smb_mapping_path: file.path
smtp_path: smtp.path
#_reply_msg
reply_msg: '*.reply_msg'
reply_msg:
#- '*.reply_msg'
service=ftp: ftp.reply_msg
service=radius: radius.reply_msg
ftp_reply_msg: ftp.reply_msg
radius_reply_msg: radius.reply_msg
#_reply_to
reply_to: '*.reply_to'
reply_to:
#- '*.reply_to'
service=sip: sip.reply_to
service=smtp: smtp.reply_to
sip_reply_to: sip.reply_to
smtp_reply_to: smtp.reply_to
#_response_body_len
response_body_len: http.response.body.bytes
response_body_len:
#- http.response.body.bytes
service=http: http.response.body.bytes
service=sip: sip.response_body_len
http_response_body_len: http.response.body.bytes
sip_response_body_len: sip.response_body_len
#_request_body_len
request_body_len: http.request.body.bytes
request_body_len:
#- http.request.body.bytes
service=http: http.request.body.bytes
service=sip: sip.request_body_len
http_request_body_len: http.request.body.bytes
sip_request_body_len: sip.request_body_len
#_rtt
#rtt:
#- event.duration
#- 'zeek.*.rtt'
#service=dns: event.duration
#service=dce_rpc: event.duration
dns_rtt: event.duration
dce_rpc_rtt: event.duration
#_service
service: '*.service'
service:
#- '*.service'
service=kerberos: kerberos.service
service=smb_mapping: smb.service
kerberos_service: kerberos.service
smb_mapping_service: smb.service
#_status
status: '*.status'
status:
#- '*.status'
service=mqtt: mqtt.status
service=pop3: pop3.status
service=socks: socks.status
mqtt_status: mqtt.status
pop3_status: pop3.status
socks_status: socks.status
#_status_code
status_code: 'http.response.status_code'
status_code:
#- 'http.response.status_code'
service=http: http.response.status_code
service=sip: sip.status_code
http_status_code: http.response.status_code
sip_status_code: sip.status_code
#_status_msg
status_msg: http.status_msg
status_msg:
#- '*.status_msg'
service=http: http.status_msg
service=sip: sip.status_msg
http_status_msg: http.status_msg
sip_status_msg: sip.status_msg
#_subject
subject: tls.subject
subject:
#- '*.subject'
service=known_certs: known_certs.subject
service=sip: sip.subject
service=smtp: smtp.subject
service=ssl: tls.subject
known_certs_subject: known_certs.subject
sip_subject: sip.subject
smtp_subject: smtp.subject
ssl_subject: tls.subject
#_trans_depth
trans_depth: '*.trans_depth'
trans_depth:
#- '*.trans_depth'
service=http: http.trans_depth
service=sip: sip.trans_depth
service=smtp: smtp.trans_depth
http_trans_depth: http.trans_depth
sip_trans_depth: sip.trans_depth
smtp_trans_depth: smtp.trans_depth
#_user_agent
#user_agent: #already normalized
http_user_agent: user_agent.original
gquic_user_agent: user_agent.original
sip_user_agent: user_agent.original
smtp_user_agent: user_agent.original
#_version
version: '*.version'
version:
#- '*.version'
service=gquic: gquic.version
service=http: http.version
service=ntp: ntp.version
service=socks: socks.version
service=snmp: snmp.version
service=ssh: ssh.version
service=tls: tls.version
gquic_version: gquic.version
http_version: http.version
ntp_version: ntp.version
socks_version: socks.version
snmp_version: snmp.version
ssh_version: ssh.version
ssl_version: tls.version
tls_version: tls.version
# Conn and Conn Long
cache_add_rx_ev: conn.cache_add_rx_ev
@@ -579,7 +697,6 @@ fieldmappings:
# DNS
AA: dns.AA
#addl: dns.addl
auth: dns.auth
answers: dns.answers.name
TTLs: dns.answers.ttl
RA: dns.RA
@@ -1055,11 +1172,119 @@ fieldmappings:
id_resp_p: destination.port
# Temporary one off rule name fields
cs-uri: url.original
# destination.domain:
# destination.ip:
# destination.port:
# http.response.status_code
# http.request.body.content
# source.domain:
# source.ip:
# source.port:
agent.version: http.version
c-ip: source.ip
clientip: source.ip
clientIP: source.ip
dest_domain:
- query
- host
- server_name
- destination.domain
- url.domain
dest_ip: destination.ip
dest_port: destination.port
#TODO:WhatShouldThisBe?==dest:
#TODO:WhatShouldThisBe?==destination:
#TODO:WhatShouldThisBe?==Destination:
destination.hostname:
- destination.domain
- url.domain
DestinationAddress: destination.ip
DestinationHostname:
- destination.domain
- url.domain
DestinationIp: destination.ip
DestinationIP: destination.ip
DestinationPort: destination.port
dst-ip: destination.ip
dstip: destination.ip
dstport: destination.port
Host:
- destination.domain
- url.domain
#host:
# - destination.domain
# - url.domain
HostVersion: http.version
http_host:
- destination.domain
- url.domain
http_uri: url.original
http_url: url.original
#http_user_agent: user_agent.original
http.request.url-query-params: url.original
HttpMethod: http.request.method
in_url: url.original
#parent_domain:
# - url.registered_domain
# - destination.registered_domain
post_url_parameter: url.original
Request Url: url.original
request_url: url.original
request_URL: url.original
RequestUrl: url.original
#response: http.response.status_code
resource.url: url.original
resource.URL: url.original
sc_status: http.response.status_code
sender_domain:
- destination.domain
- url.domain
service.response_code: http.response.status_code
SourceAddr:
- source.address
- source.ip
SourceAddress: source.ip
SourceIP: source.ip
SourceIp: source.ip
SourceNetworkAddress:
- source.address
- source.ip
SourcePort: source.port
srcip: source.ip
Status: http.response.status_code
#status: http.response.status_code
url: url.original
URL: url.original
url_query: url.original
url.query: url.original
uri_path: url.original
#user_agent: user_agent.original
user_agent.name: user_agent.original
user-agent: user_agent.original
User-Agent: user_agent.original
useragent: user_agent.original
UserAgent: user_agent.original
User Agent: user_agent.original
web_dest:
- url.domain
- destination.domain
web.dest:
- url.domain
- destination.domain
Web.dest:
- url.domain
- destination.domain
web.host:
- url.domain
- destination.domain
Web.host:
- url.domain
- destination.domain
web_method: http.request.method
Web_method: http.request.method
web.method: http.request.method
Web.method: http.request.method
web_src: source.ip
web_status: http.response.status_code
Web_status: http.response.status_code
web.status: http.response.status_code
Web.status: http.response.status_code
web_uri: url.original
web_url: url.original
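A minimal sketch, assuming the logsource rewrite is applied before field mapping, of how a proxy rule would pick up the service=http branch of a conditional mapping:
title: Example - server error via proxy (hypothetical, for illustration)
logsource:
    category: proxy
detection:
    selection:
        status_code: 500
    condition: selection
# zeek-category-proxy rewrites this rule to product=zeek/service=http,
# so status_code should resolve through its service=http branch to
# http.response.status_code.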
@@ -0,0 +1,2 @@
defaultindex:
- filebeat-*
@@ -0,0 +1,2 @@
defaultindex:
- logstash-*
+3
@@ -0,0 +1,3 @@
defaultindex:
- logstash-*
- filebeat-*
+15
@@ -0,0 +1,15 @@
logsources:
apache:
category: webserver
index: logstash-apache-*
webapp-error:
category: application
index: logstash-apache_error-*
linux-auth:
product: linux
service: auth
index: logstash-auth-*
fieldmappings:
client_ip: clientip
url: request
defaultindex: logstash-*
+30
@@ -0,0 +1,30 @@
logsources:
windows:
product: windows
index: logstash-windows-*
windows-application:
product: windows
service: application
conditions:
EventLog: Application
windows-security:
product: windows
service: security
conditions:
EventLog: Security
windows-sysmon:
product: windows
service: sysmon
conditions:
EventLog: Microsoft-Windows-Sysmon
windows-dns-server:
product: windows
service: dns-server
conditions:
EventLog: 'DNS Server'
windows-driver-framework:
product: windows
service: driver-framework
conditions:
source: 'Microsoft-Windows-DriverFrameworks-UserMode/Operational'
defaultindex: logstash-*
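A minimal sketch of these logsource conditions, using a hypothetical rule; matching logsource entries contribute both the EventLog condition and the index:
title: Example - failed logon (hypothetical, for illustration)
logsource:
    product: windows
    service: security
detection:
    selection:
        EventID: 4625
    condition: selection
# Both the windows and windows-security entries match, so the query is
# ANDed with EventLog:"Security" and scoped to the logstash-windows-*
# index rather than the generic logstash-* defaultindex.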
+95
@@ -0,0 +1,95 @@
logsources:
windows:
product: windows
index: <winlogbeat-{now/d}>
windows-application:
product: windows
service: application
conditions:
log_name: Application
windows-security:
product: windows
service: security
conditions:
log_name: Security
windows-sysmon:
product: windows
service: sysmon
conditions:
log_name: 'Microsoft-Windows-Sysmon/Operational'
windows-dns-server:
product: windows
service: dns-server
conditions:
log_name: 'DNS Server'
windows-driver-framework:
product: windows
service: driver-framework
conditions:
source: 'Microsoft-Windows-DriverFrameworks-UserMode/Operational'
defaultindex: <winlogbeat-{now/d}>
# Extract all field names with yq:
# yq -r '.detection | del(.condition) | map(keys) | .[][]' $(find sigma/rules/windows -name '*.yml') | sort -u | grep -v ^EventID$ | sed 's/^\(.*\)/ \1: event_data.\1/g'
# Keep EventID! Clean up the list afterwards!
fieldmappings:
EventID: event_id
AccessMask: event_data.AccessMask
AccountName: event_data.AccountName
AllowedToDelegateTo: event_data.AllowedToDelegateTo
AttributeLDAPDisplayName: event_data.AttributeLDAPDisplayName
AuditPolicyChanges: event_data.AuditPolicyChanges
AuthenticationPackageName: event_data.AuthenticationPackageName
CallingProcessName: event_data.CallingProcessName
CallTrace: event_data.CallTrace
CommandLine: event_data.CommandLine
ComputerName: event_data.ComputerName
CurrentDirectory: event_data.CurrentDirectory
Description: event_data.Description
DestinationHostname: event_data.DestinationHostname
DestinationIp: event_data.DestinationIp
DestinationIsIpv6: event_data.DestinationIsIpv6
DestinationPort: event_data.DestinationPort
Details: event_data.Details
EngineVersion: event_data.EngineVersion
EventType: event_data.EventType
FailureCode: event_data.FailureCode
FileName: event_data.FileName
GrantedAccess: event_data.GrantedAccess
GroupName: event_data.GroupName
Hashes: event_data.Hashes
HiveName: event_data.HiveName
HostVersion: event_data.HostVersion
Image: event_data.Image
ImageLoaded: event_data.ImageLoaded
ImagePath: event_data.ImagePath
Imphash: event_data.Imphash
LogonProcessName: event_data.LogonProcessName
LogonType: event_data.LogonType
NewProcessName: event_data.NewProcessName
ObjectClass: event_data.ObjectClass
ObjectName: event_data.ObjectName
ObjectType: event_data.ObjectType
ObjectValueName: event_data.ObjectValueName
ParentCommandLine: event_data.ParentCommandLine
ParentImage: event_data.ParentImage
Path: event_data.Path
PipeName: event_data.PipeName
ProcessName: event_data.ProcessName
Properties: event_data.Properties
ServiceFileName: event_data.ServiceFileName
ServiceName: event_data.ServiceName
ShareName: event_data.ShareName
Signature: event_data.Signature
Source: event_data.Source
SourceImage: event_data.SourceImage
StartModule: event_data.StartModule
Status: event_data.Status
SubjectUserName: event_data.SubjectUserName
TargetFilename: event_data.TargetFilename
TargetImage: event_data.TargetImage
TargetObject: event_data.TargetObject
TicketEncryptionType: event_data.TicketEncryptionType
TicketOptions: event_data.TicketOptions
User: event_data.User
WorkstationName: event_data.WorkstationName
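A minimal sketch of the event_data prefixing above, using a hypothetical rule:
title: Example - encoded command line (hypothetical, for illustration)
logsource:
    product: windows
    service: sysmon
detection:
    selection:
        EventID: 1
        CommandLine: '* -enc *'
    condition: selection
# windows-sysmon adds log_name:'Microsoft-Windows-Sysmon/Operational',
# EventID is renamed to event_id, CommandLine to event_data.CommandLine,
# and the search targets the daily <winlogbeat-{now/d}> index.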
+94
@@ -0,0 +1,94 @@
logsources:
windows:
product: windows
index: winlogbeat-*
windows-application:
product: windows
service: application
conditions:
log_name: Application
windows-security:
product: windows
service: security
conditions:
log_name: Security
windows-sysmon:
product: windows
service: sysmon
conditions:
log_name: 'Microsoft-Windows-Sysmon/Operational'
windows-dns-server:
product: windows
service: dns-server
conditions:
log_name: 'DNS Server'
windows-driver-framework:
product: windows
service: driver-framework
conditions:
source: 'Microsoft-Windows-DriverFrameworks-UserMode/Operational'
defaultindex: winlogbeat-*
# Extract all field names with yq:
# yq -r '.detection | del(.condition) | map(keys) | .[][]' $(find sigma/rules/windows -name '*.yml') | sort -u | grep -v ^EventID$ | sed 's/^\(.*\)/ \1: event_data.\1/g'
# Keep EventID! Clean up the list afterwards!
fieldmappings:
EventID: event_id
AccessMask: event_data.AccessMask
AccountName: event_data.AccountName
AllowedToDelegateTo: event_data.AllowedToDelegateTo
AttributeLDAPDisplayName: event_data.AttributeLDAPDisplayName
AuditPolicyChanges: event_data.AuditPolicyChanges
AuthenticationPackageName: event_data.AuthenticationPackageName
CallingProcessName: event_data.CallingProcessName
CallTrace: event_data.CallTrace
CommandLine: event_data.CommandLine
ComputerName: event_data.ComputerName
CurrentDirectory: event_data.CurrentDirectory
Description: event_data.Description
DestinationHostname: event_data.DestinationHostname
DestinationIp: event_data.DestinationIp
DestinationIsIpv6: event_data.DestinationIsIpv6
DestinationPort: event_data.DestinationPort
Details: event_data.Details
EngineVersion: event_data.EngineVersion
EventType: event_data.EventType
FailureCode: event_data.FailureCode
FileName: event_data.FileName
GrantedAccess: event_data.GrantedAccess
GroupName: event_data.GroupName
Hashes: event_data.Hashes
HiveName: event_data.HiveName
HostVersion: event_data.HostVersion
Image: event_data.Image
ImageLoaded: event_data.ImageLoaded
ImagePath: event_data.ImagePath
Imphash: event_data.Imphash
LogonProcessName: event_data.LogonProcessName
LogonType: event_data.LogonType
NewProcessName: event_data.NewProcessName
ObjectClass: event_data.ObjectClass
ObjectName: event_data.ObjectName
ObjectType: event_data.ObjectType
ObjectValueName: event_data.ObjectValueName
ParentCommandLine: event_data.ParentCommandLine
ParentImage: event_data.ParentImage
Path: event_data.Path
PipeName: event_data.PipeName
ProcessName: event_data.ProcessName
Properties: event_data.Properties
ServiceFileName: event_data.ServiceFileName
ServiceName: event_data.ServiceName
ShareName: event_data.ShareName
Signature: event_data.Signature
Source: event_data.Source
SourceImage: event_data.SourceImage
StartModule: event_data.StartModule
Status: event_data.Status
SubjectUserName: event_data.SubjectUserName
TargetFilename: event_data.TargetFilename
TargetImage: event_data.TargetImage
TargetObject: event_data.TargetObject
TicketEncryptionType: event_data.TicketEncryptionType
TicketOptions: event_data.TicketOptions
User: event_data.User
WorkstationName: event_data.WorkstationName
+468
@@ -0,0 +1,468 @@
title: Zeek field mappings for the default collection of JSON logs, with no parsing/normalization, shipped into a logstash-* index
order: 20
backends:
- es-qs
- es-dsl
- elasticsearch-rule
- kibana
- xpack-watcher
- elastalert
- elastalert-dsl
logsources:
zeek:
product: zeek
index: 'logstash*'
zeek-category-accounting:
category: accounting
rewrite:
product: zeek
service: syslog
zeek-category-firewall:
category: firewall
conditions:
'@stream': conn
zeek-category-dns:
category: dns
conditions:
'@stream': dns
zeek-category-proxy:
category: proxy
rewrite:
product: zeek
service: http
zeek-category-webserver:
category: webserver
conditions:
'@stream': http
rewrite:
product: zeek
service: http
zeek-conn:
product: zeek
service: conn
conditions:
'@stream': conn
zeek-conn_long:
product: zeek
service: conn_long
conditions:
'@stream': conn_long
zeek-dce_rpc:
product: zeek
service: dce_rpc
conditions:
'@stream': dce_rpc
zeek-dns:
product: zeek
service: dns
conditions:
'@stream': dns
zeek-dnp3:
product: zeek
service: dnp3
conditions:
'@stream': dnp3
zeek-dpd:
product: zeek
service: dpd
conditions:
'@stream': dpd
zeek-files:
product: zeek
service: files
conditions:
'@stream': files
zeek-ftp:
product: zeek
service: ftp
conditions:
'@stream': ftp
zeek-gquic:
product: zeek
service: gquic
conditions:
'@stream': gquic
zeek-http:
product: zeek
service: http
conditions:
'@stream': http
zeek-http2:
product: zeek
service: http2
conditions:
'@stream': http2
zeek-intel:
product: zeek
service: intel
conditions:
'@stream': intel
zeek-irc:
product: zeek
service: irc
conditions:
'@stream': irc
zeek-kerberos:
product: zeek
service: kerberos
conditions:
'@stream': kerberos
zeek-known_certs:
product: zeek
service: known_certs
conditions:
'@stream': known_certs
zeek-known_hosts:
product: zeek
service: known_hosts
conditions:
'@stream': known_hosts
zeek-known_modbus:
product: zeek
service: known_modbus
conditions:
'@stream': known_modbus
zeek-known_services:
product: zeek
service: known_services
conditions:
'@stream': known_services
zeek-modbus:
product: zeek
service: modbus
conditions:
'@stream': modbus
zeek-modbus_register_change:
product: zeek
service: modbus_register_change
conditions:
'@stream': modbus_register_change
zeek-mqtt_connect:
product: zeek
service: mqtt_connect
conditions:
'@stream': mqtt_connect
zeek-mqtt_publish:
product: zeek
service: mqtt_publish
conditions:
'@stream': mqtt_publish
zeek-mqtt_subscribe:
product: zeek
service: mqtt_subscribe
conditions:
'@stream': mqtt_subscribe
zeek-mysql:
product: zeek
service: mysql
conditions:
'@stream': mysql
zeek-notice:
product: zeek
service: notice
conditions:
'@stream': notice
zeek-ntlm:
product: zeek
service: ntlm
conditions:
'@stream': ntlm
zeek-ntp:
product: zeek
service: ntp
conditions:
'@stream': ntp
zeek-ocsp:
product: zeek
service: ocsp
conditions:
'@stream': ocsp
zeek-pe:
product: zeek
service: pe
conditions:
'@stream': pe
zeek-pop3:
product: zeek
service: pop3
conditions:
'@stream': pop3
zeek-radius:
product: zeek
service: radius
conditions:
'@stream': radius
zeek-rdp:
product: zeek
service: rdp
conditions:
'@stream': rdp
zeek-rfb:
product: zeek
service: rfb
conditions:
'@stream': rfb
zeek-sip:
product: zeek
service: sip
conditions:
'@stream': sip
zeek-smb_files:
product: zeek
service: smb_files
conditions:
'@stream': smb_files
zeek-smb_mapping:
product: zeek
service: smb_mapping
conditions:
'@stream': smb_mapping
zeek-smtp:
product: zeek
service: smtp
conditions:
'@stream': smtp
zeek-smtp_links:
product: zeek
service: smtp_links
conditions:
'@stream': smtp_links
zeek-snmp:
product: zeek
service: snmp
conditions:
'@stream': snmp
zeek-socks:
product: zeek
service: socks
conditions:
'@stream': socks
zeek-software:
product: zeek
service: software
conditions:
'@stream': software
zeek-ssh:
product: zeek
service: ssh
conditions:
'@stream': ssh
zeek-ssl:
product: zeek
service: ssl
conditions:
'@stream': ssl
zeek-tls: # In case people call it TLS even though orig log is called ssl
product: zeek
service: tls
conditions:
'@stream': ssl
zeek-syslog:
product: zeek
service: syslog
conditions:
'@stream': syslog
zeek-tunnel:
product: zeek
service: tunnel
conditions:
'@stream': tunnel
zeek-traceroute:
product: zeek
service: traceroute
conditions:
'@stream': traceroute
zeek-weird:
product: zeek
service: weird
conditions:
'@stream': weird
zeek-x509:
product: zeek
service: x509
conditions:
'@stream': x509
zeek-ip_search:
product: zeek
service: network
conditions:
'@stream':
- conn
- conn_long
- dce_rpc
- dhcp
- dnp3
- dns
- ftp
- gquic
- http
- irc
- kerberos
- modbus
- mqtt_connect
- mqtt_publish
- mqtt_subscribe
- mysql
- ntlm
- ntp
- radius
- rfb
- sip
- smb_files
- smb_mapping
- smtp
- smtp_links
- snmp
- socks
- ssh
- tls #SSL
- tunnel
- weird
defaultindex: 'logstash-*'
fieldmappings:
# All Logs Applied Mapping & Taxonomy
dst_ip: id.resp_h
dst_port: id.resp_p
network_protocol: proto
src_ip: id.orig_h
src_port: id.orig_p
# DNS matching Taxonomy & DNS Category
answer: answers
#question_length: # Does not exist in open source version
record_type: qtype_name
#parent_domain: # Does not exist in open source version
# HTTP matching Taxonomy & Web/Proxy Category
cs-bytes: request_body_len
cs-cookie: cookie
r-dns: host
sc-bytes: response_body_len
sc-status: status_code
c-uri: uri
c-uri-extension: uri
c-uri-query: uri
c-uri-stem: uri
c-useragent: user_agent
cs-host: host
cs-method: method
cs-referrer: referrer
cs-version: version
# Temporary one off rule name fields
agent.version: version
c-cookie: cookie
c-ip: id.orig_h
cs-uri: uri
clientip: id.orig_h
clientIP: id.orig_h
dest_domain:
- query
- host
- server_name
dest_ip: id.resp_h
dest_port: id.resp_p
#TODO:WhatShouldThisBe?==dest:
#TODO:WhatShouldThisBe?==destination:
#TODO:WhatShouldThisBe?==Destination:
destination.hostname:
- query
- host
- server_name
DestinationAddress:
DestinationHostname:
- host
- query
- server_name
DestinationIp: id.resp_h
DestinationIP: id.resp_h
DestinationPort: id.resp_p
dst-ip: id.resp_h
dstip: id.resp_h
dstport: id.resp_p
Host:
- host
- query
- server_name
HostVersion: http.version
http_host:
- host
- query
- server_name
http_uri: uri
http_url: uri
http_user_agent: user_agent
http.request.url-query-params: uri
HttpMethod: method
in_url: uri
# parent_domain: # Not in open source zeek
post_url_parameter: uri
Request Url: uri
request_url: uri
request_URL: uri
RequestUrl: uri
#response: status_code
resource.url: uri
resource.URL: uri
sc_status: status_code
sender_domain:
- query
- server_name
service.response_code: status_code
source: id.orig_h
SourceAddr: id.orig_h
SourceAddress: id.orig_h
SourceIP: id.orig_h
SourceIp: id.orig_h
SourceNetworkAddress: id.orig_h
SourcePort: id.orig_p
srcip: id.orig_h
Status: status_code
status: status_code
url: uri
URL: uri
url_query: uri
url.query: uri
uri_path: uri
user_agent: user_agent
user_agent.name: user_agent
user-agent: user_agent
User-Agent: user_agent
useragent: user_agent
UserAgent: user_agent
User Agent: user_agent
web_dest:
- host
- query
- server_name
web.dest:
- host
- query
- server_name
Web.dest:
- host
- query
- server_name
web.host:
- host
- query
- server_name
Web.host:
- host
- query
- server_name
web_method: method
Web_method: method
web.method: method
Web.method: method
web_src: id.orig_h
web_status: status_code
Web_status: status_code
web.status: status_code
Web.status: status_code
web_uri: uri
web_url: uri
# Most are in ECS, but for things not using Elastic these need renaming
destination.ip: id.resp_h
destination.port: id.resp_p
http.request.body.content: post_body
#source.domain:
source.ip: id.orig_h
source.port: id.orig_p
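A minimal sketch of the '@stream' conditions and the zeek-ip_search logsource, using a hypothetical rule:
title: Example - known bad address (hypothetical, for illustration)
logsource:
    product: zeek
    service: network
detection:
    selection:
        src_ip: '10.13.37.1'
    condition: selection
# zeek-ip_search matches product=zeek/service=network, so '@stream' is
# ORed over conn, dns, http, ssl and the other listed streams, and
# src_ip maps to the raw Zeek field id.orig_h in the logstash-* index.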
+625
@@ -0,0 +1,625 @@
title: Humio log source conditions
order: 20
backends:
- humio
logsources:
zeek:
product: zeek
zeek-category-accounting:
category: accounting
rewrite:
product: zeek
service: syslog
zeek-category-firewall:
category: firewall
rewrite:
product: zeek
service: conn
zeek-category-dns:
category: dns
rewrite:
product: zeek
service: dns
zeek-category-proxy:
category: proxy
rewrite:
product: zeek
service: http
zeek-category-webserver:
category: webserver
rewrite:
product: zeek
service: http
zeek-conn:
product: zeek
service: conn
conditions:
'@stream': conn
zeek-conn_long:
product: zeek
service: conn_long
conditions:
'@stream': conn_long
zeek-dce_rpc:
product: zeek
service: dce_rpc
conditions:
'@stream': dce_rpc
zeek-dns:
product: zeek
service: dns
conditions:
'@stream': dns
zeek-dnp3:
product: zeek
service: dnp3
conditions:
'@stream': dnp3
zeek-dpd:
product: zeek
service: dpd
conditions:
'@stream': dpd
zeek-files:
product: zeek
service: files
conditions:
'@stream': files
zeek-ftp:
product: zeek
service: ftp
conditions:
'@stream': ftp
zeek-gquic:
product: zeek
service: gquic
conditions:
'@stream': gquic
zeek-http:
product: zeek
service: http
conditions:
'@stream': http
zeek-http2:
product: zeek
service: http2
conditions:
'@stream': http2
zeek-intel:
product: zeek
service: intel
conditions:
'@stream': intel
zeek-irc:
product: zeek
service: irc
conditions:
'@stream': irc
zeek-kerberos:
product: zeek
service: kerberos
conditions:
'@stream': kerberos
zeek-known_certs:
product: zeek
service: known_certs
conditions:
'@stream': known_certs
zeek-known_hosts:
product: zeek
service: known_hosts
conditions:
'@stream': known_hosts
zeek-known_modbus:
product: zeek
service: known_modbus
conditions:
'@stream': known_modbus
zeek-known_services:
product: zeek
service: known_services
conditions:
'@stream': known_services
zeek-modbus:
product: zeek
service: modbus
conditions:
'@stream': modbus
zeek-modbus_register_change:
product: zeek
service: modbus_register_change
conditions:
'@stream': modbus_register_change
zeek-mqtt_connect:
product: zeek
service: mqtt_connect
conditions:
'@stream': mqtt_connect
zeek-mqtt_publish:
product: zeek
service: mqtt_publish
conditions:
'@stream': mqtt_publish
zeek-mqtt_subscribe:
product: zeek
service: mqtt_subscribe
conditions:
'@stream': mqtt_subscribe
zeek-mysql:
product: zeek
service: mysql
conditions:
'@stream': mysql
zeek-notice:
product: zeek
service: notice
conditions:
'@stream': notice
zeek-ntlm:
product: zeek
service: ntlm
conditions:
'@stream': ntlm
zeek-ntp:
product: zeek
service: ntp
conditions:
'@stream': ntp
zeek-ocsp:
product: zeek
service: ocsp
conditions:
'@stream': ocsp
zeek-pe:
product: zeek
service: pe
conditions:
'@stream': pe
zeek-pop3:
product: zeek
service: pop3
conditions:
'@stream': pop3
zeek-radius:
product: zeek
service: radius
conditions:
'@stream': radius
zeek-rdp:
product: zeek
service: rdp
conditions:
'@stream': rdp
zeek-rfb:
product: zeek
service: rfb
conditions:
'@stream': rfb
zeek-sip:
product: zeek
service: sip
conditions:
'@stream': sip
zeek-smb_files:
product: zeek
service: smb_files
conditions:
'@stream': smb_files
zeek-smb_mapping:
product: zeek
service: smb_mapping
conditions:
'@stream': smb_mapping
zeek-smtp:
product: zeek
service: smtp
conditions:
'@stream': smtp
zeek-smtp_links:
product: zeek
service: smtp_links
conditions:
'@stream': smtp_links
zeek-snmp:
product: zeek
service: snmp
conditions:
'@stream': snmp
zeek-socks:
product: zeek
service: socks
conditions:
'@stream': socks
zeek-software:
product: zeek
service: software
conditions:
'@stream': software
zeek-ssh:
product: zeek
service: ssh
conditions:
'@stream': ssh
zeek-ssl:
product: zeek
service: ssl
conditions:
'@stream': ssl
zeek-tls: # In case people call it TLS even though orig log is called ssl
product: zeek
service: tls
conditions:
'@stream': ssl
zeek-syslog:
product: zeek
service: syslog
conditions:
'@stream': syslog
zeek-tunnel:
product: zeek
service: tunnel
conditions:
'@stream': tunnel
zeek-traceroute:
product: zeek
service: traceroute
conditions:
'@stream': traceroute
zeek-weird:
product: zeek
service: weird
conditions:
'@stream': weird
zeek-x509:
product: zeek
service: x509
conditions:
'@stream': x509
zeek-ip_search:
product: zeek
service: network
conditions:
'@stream':
- conn
- conn_long
- dce_rpc
- dhcp
- dnp3
- dns
- ftp
- gquic
- http
- irc
- kerberos
- modbus
- mqtt_connect
- mqtt_publish
- mqtt_subscribe
- mysql
- ntlm
- ntp
- radius
- rfb
- sip
- smb_files
- smb_mapping
- smtp
- smtp_links
- snmp
- socks
- ssh
- tls #SSL
- tunnel
- weird
fieldmappings:
# Deep mappings Taxonomy for overall/general fields
dst_ip:
product=windows: winlog.event_data.DestinationIp
product=zeek: id.resp_h
src_ip:
product=windows: winlog.event_data.SourceIp
product=zeek: id.orig_h
dst_port:
product=windows: winlog.event_data.DestinationPort
product=zeek: id.resp_p
src_port:
product=windows: winlog.event_data.SourcePort
product=zeek: id.orig_p
network_protocol:
product=zeek: proto
# Deep mappings Taxonomy for DNS Category and DNS service
answer:
product=zeek: answers
#question_length: # product=zeek: # Does not exist in open source version
record_type:
product=zeek: qtype_name
#parent_domain: #product=zeek: # Does not exist in open source version
# Deep mappings Taxonomy for HTTP, Webserver category, and Proxy category
cs-bytes:
product=zeek: request_body_len
cs-cookie:
product=zeek: cookie
r-dns:
product=zeek: host
sc-bytes:
product=zeek: response_body_len
sc-status:
product=zeek: status_code
c-uri:
product=zeek: uri
c-uri-extension:
product=zeek: uri
c-uri-query:
product=zeek: uri
c-uri-stem:
product=zeek: uri
c-useragent:
product=zeek: user_agent
cs-host:
product=zeek: host
cs-method:
product=zeek: method
cs-referrer:
product=zeek: referrer
cs-version:
product=zeek: version
# Windows / WEF / Winlogbeat
EventID: winlog.event_id
Event_ID: winlog.event_id
eventId: winlog.event_id
event_id: winlog.event_id
event-id: winlog.event_id
eventid: winlog.event_id
AccessMask: winlog.event_data.AccessMask
AccountName: winlog.event_data.AccountName
AllowedToDelegateTo: winlog.event_data.AllowedToDelegateTo
AttributeLDAPDisplayName: winlog.event_data.AttributeLDAPDisplayName
AuditPolicyChanges: winlog.event_data.AuditPolicyChanges
AuthenticationPackageName: winlog.event_data.AuthenticationPackageName
CallingProcessName: winlog.event_data.CallingProcessName
CallTrace: winlog.event_data.CallTrace
Channel: winlog.channel
CommandLine: winlog.event_data.CommandLine
ComputerName: winlog.ComputerName
CurrentDirectory: winlog.event_data.CurrentDirectory
Description: winlog.event_data.Description
DestinationHostname: winlog.event_data.DestinationHostname
DestinationIp: winlog.event_data.DestinationIp
DestinationIsIpv6: winlog.event_data.DestinationIsIpv6
DestinationPort: winlog.event_data.DestinationPort
Details: winlog.event_data.Details
EngineVersion: winlog.event_data.EngineVersion
EventType: winlog.event_data.EventType
FailureCode: winlog.event_data.FailureCode
FileName: winlog.event_data.FileName
GrantedAccess: winlog.event_data.GrantedAccess
GroupName: winlog.event_data.GroupName
GroupSid: winlog.event_data.GroupSid
Hashes: winlog.event_data.Hashes
HiveName: winlog.event_data.HiveName
HostVersion: winlog.event_data.HostVersion
Image: winlog.event_data.Image
ImageLoaded: winlog.event_data.ImageLoaded
ImagePath: winlog.event_data.ImagePath
Imphash: winlog.event_data.Imphash
IpAddress: winlog.event_data.IpAddress
KeyLength: winlog.event_data.KeyLength
LogonProcessName: winlog.event_data.LogonProcessName
LogonType: winlog.event_data.LogonType
NewProcessName: winlog.event_data.NewProcessName
ObjectClass: winlog.event_data.ObjectClass
ObjectName: winlog.event_data.ObjectName
ObjectType: winlog.event_data.ObjectType
ObjectValueName: winlog.event_data.ObjectValueName
ParentCommandLine: winlog.event_data.ParentCommandLine
ParentProcessName: winlog.event_data.ParentProcessName
ParentImage: winlog.event_data.ParentImage
Path: winlog.event_data.Path
PipeName: winlog.event_data.PipeName
ProcessCommandLine: winlog.event_data.ProcessCommandLine
ProcessName: winlog.event_data.ProcessName
Properties: winlog.event_data.Properties
SecurityID: winlog.event_data.SecurityID
ServiceFileName: winlog.event_data.ServiceFileName
ServiceName: winlog.event_data.ServiceName
ShareName: winlog.event_data.ShareName
Signature: winlog.event_data.Signature
Source: winlog.event_data.Source
SourceImage: winlog.event_data.SourceImage
SourceIp: winlog.event_data.SourceIp
StartModule: winlog.event_data.StartModule
Status: winlog.event_data.Status
SubjectUserName: winlog.event_data.SubjectUserName
SubjectUserSid: winlog.event_data.SubjectUserSid
TargetFilename: winlog.event_data.TargetFilename
Targetfilename: winlog.event_data.TargetFilename
TargetImage: winlog.event_data.TargetImage
TargetObject: winlog.event_data.TargetObject
TicketEncryptionType: winlog.event_data.TicketEncryptionType
TicketOptions: winlog.event_data.TicketOptions
User: winlog.event_data.User
WorkstationName: winlog.event_data.WorkstationName
# Channel: WLAN-Autoconfig AND EventID: 8001
AuthenticationAlgorithm: winlog.event_data.AuthenticationAlgorithm
BSSID: winlog.event_data.BSSID
BSSType: winlog.event_data.BSSType
CipherAlgorithm: winlog.event_data.CipherAlgorithm
ConnectionId: winlog.event_data.ConnectionId
ConnectionMode: winlog.event_data.ConnectionMode
InterfaceDescription: winlog.event_data.InterfaceDescription
InterfaceGuid: winlog.event_data.InterfaceGuid
OnexEnabled: winlog.event_data.OnexEnabled
PHYType: winlog.event_data.PHYType
ProfileName: winlog.event_data.ProfileName
SSID: winlog.event_data.SSID
# Zeek Deep Mappings
# Temporary one off rule name fields
agent.version:
product=zeek: version
c-cookie:
product=zeek: cookie
c-ip:
product=zeek: id.orig_h
cs-uri:
product=zeek: uri
clientip:
product=zeek: id.orig_h
clientIP:
product=zeek: id.orig_h
dest_domain:
product=zeek: host
#- query
#- server_name
dest_ip:
product=zeek: id.resp_h
dest_port:
product=zeek: id.resp_p
#TODO:WhatShouldThisBe?==dest:
#TODO:WhatShouldThisBe?==destination:
#TODO:WhatShouldThisBe?==Destination:
destination.hostname:
product=zeek: host
#- query
#- server_name
DestinationAddress:
product=zeek: id.resp_h
dst-ip:
product=zeek: id.resp_h
dstip:
product=zeek: id.resp_h
dstport:
product=zeek: id.resp_p
Host:
product=zeek: host
#- query
#- server_name
http_host:
product=zeek: host
#- query
#- server_name
http_uri:
product=zeek: uri
http_url:
product=zeek: uri
http_user_agent:
product=zeek: user_agent
http.request.url-query-params:
product=zeek: uri
HttpMethod:
product=zeek: method
in_url:
product=zeek: uri
post_url_parameter:
product=zeek: uri
Request Url:
product=zeek: uri
request_url:
product=zeek: uri
request_URL:
product=zeek: uri
RequestUrl:
product=zeek: uri
response:
product=zeek: status_code
resource.url:
product=zeek: uri
resource.URL:
product=zeek: uri
sc_status:
product=zeek: status_code
service.response_code:
product=zeek: status_code
source:
product=zeek: id.orig_h
SourceAddr:
product=zeek: id.orig_h
SourceAddress:
product=zeek: id.orig_h
SourceIP:
product=zeek: id.orig_h
SourceNetworkAddress:
product=zeek: id.orig_h
SourcePort:
product=zeek: id.orig_p
srcip:
product=zeek: id.orig_h
status:
product=zeek: status_code
url:
product=zeek: uri
URL:
product=zeek: uri
url_query:
product=zeek: uri
url.query:
product=zeek: uri
uri_path:
product=zeek: uri
user_agent:
product=zeek: user_agent
user_agent.name:
product=zeek: user_agent
user-agent:
product=zeek: user_agent
User-Agent:
product=zeek: user_agent
useragent:
product=zeek: user_agent
UserAgent:
product=zeek: user_agent
User Agent:
product=zeek: user_agent
web_dest:
product=zeek: host
#- query
#- server_name
web.dest:
product=zeek: host
#- query
#- server_name
Web.dest:
product=zeek: host
#- query
#- server_name
web.host:
product=zeek: host
#- query
#- server_name
Web.host:
product=zeek: host
#- query
#- server_name
web_method:
product=zeek: method
Web_method:
product=zeek: method
web.method:
product=zeek: method
Web.method:
product=zeek: method
web_src:
product=zeek: id.orig_h
web_status:
product=zeek: status_code
Web_status:
product=zeek: status_code
web.status:
product=zeek: status_code
Web.status:
product=zeek: status_code
web_uri:
product=zeek: uri
web_url:
product=zeek: uri
# Already ECS field names
destination.ip:
product=zeek: id.resp_h
destination.port:
product=zeek: id.resp_p
http.request.body.content:
product=zeek: post_body
#source.domain:
source.ip:
product=zeek: id.orig_h
source.port:
product=zeek: id.orig_p
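# Illustration (hypothetical rule fragment, not part of the shipped mapping):
# given the conditional mappings above, a Sigma rule declaring
#   logsource:
#     product: zeek
#   detection:
#     selection:
#       dest_ip: 10.0.0.5
#       dest_port: 53
# has its selection rewritten against the Zeek schema, effectively becoming
#   id.resp_h: 10.0.0.5
#   id.resp_p: 53
# while rules for any other product keep their original field names.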
+108 -1
View File
@@ -363,4 +363,111 @@ fieldmappings:
- host
- server_name
dest_ip: id.resp_h
dest_port: id.resp_p
#TODO:WhatShouldThisBe?==dest:
#TODO:WhatShouldThisBe?==destination:
#TODO:WhatShouldThisBe?==Destination:
destination.hostname:
- query
- host
- server_name
DestinationAddress: id.resp_h
DestinationHostname:
- host
- query
- server_name
DestinationIp: id.resp_h
DestinationIP: id.resp_h
DestinationPort: id.resp_p
dst-ip: id.resp_h
dstip: id.resp_h
dstport: id.resp_p
Host:
- host
- query
- server_name
HostVersion: http.version
http_host:
- host
- query
- server_name
http_uri: uri
http_url: uri
http_user_agent: user_agent
http.request.url-query-params: uri
HttpMethod: method
in_url: uri
# parent_domain: # Not in open source zeek
post_url_parameter: uri
Request Url: uri
request_url: uri
request_URL: uri
RequestUrl: uri
#response: status_code
resource.url: uri
resource.URL: uri
sc_status: status_code
sender_domain:
- query
- server_name
service.response_code: status_code
source: id.orig_h
SourceAddr: id.orig_h
SourceAddress: id.orig_h
SourceIP: id.orig_h
SourceIp: id.orig_h
SourceNetworkAddress: id.orig_h
SourcePort: id.orig_p
srcip: id.orig_h
Status: status_code
status: status_code
url: uri
URL: uri
url_query: uri
url.query: uri
uri_path: uri
user_agent: user_agent
user_agent.name: user_agent
user-agent: user_agent
User-Agent: user_agent
useragent: user_agent
UserAgent: user_agent
User Agent: user_agent
web_dest:
- host
- query
- server_name
web.dest:
- host
- query
- server_name
Web.dest:
- host
- query
- server_name
web.host:
- host
- query
- server_name
Web.host:
- host
- query
- server_name
web_method: method
Web_method: method
web.method: method
Web.method: method
web_src: id.orig_h
web_status: status_code
Web_status: status_code
web.status: status_code
Web.status: status_code
web_uri: uri
web_url: uri
# Most are in ECS, but for things not using Elastic these need to be renamed
destination.ip: id.resp_h
destination.port: id.resp_p
http.request.body.content: post_body
#source.domain:
source.ip: id.orig_h
source.port: id.orig_p
+62
View File
@@ -0,0 +1,62 @@
logsources:
windows-application:
product: windows
service: application
conditions:
LogName: 'Application'
windows-security:
product: windows
service: security
conditions:
LogName: 'Security'
windows-system:
product: windows
service: system
conditions:
LogName: 'System'
windows-sysmon:
product: windows
service: sysmon
conditions:
LogName: 'Microsoft-Windows-Sysmon/Operational'
windows-powershell:
product: windows
service: powershell
conditions:
LogName: 'Microsoft-Windows-PowerShell/Operational'
windows-classicpowershell:
product: windows
service: powershell-classic
conditions:
LogName: 'Windows PowerShell'
windows-taskscheduler:
product: windows
service: taskscheduler
conditions:
LogName: 'Microsoft-Windows-TaskScheduler/Operational'
windows-wmi:
product: windows
service: wmi
conditions:
LogName: 'Microsoft-Windows-WMI-Activity/Operational'
windows-dns-server:
product: windows
service: dns-server
category: dns
conditions:
LogName: 'DNS Server'
windows-dns-server-audit:
product: windows
service: dns-server-audit
conditions:
LogName: 'Microsoft-Windows-DNS-Server/Audit'
windows-driver-framework:
product: windows
service: driver-framework
conditions:
LogName: 'Microsoft-Windows-DriverFrameworks-UserMode/Operational'
windows-ntlm:
product: windows
service: ntlm
conditions:
LogName: 'Microsoft-Windows-NTLM/Operational'
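# Illustration (hypothetical rule fragment): a rule declaring
#   logsource:
#     product: windows
#     service: sysmon
# matches the windows-sysmon entry above, so the generated query is AND-ed
# with the condition LogName: 'Microsoft-Windows-Sysmon/Operational'.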
+93 -47
View File
@@ -1,52 +1,98 @@
title: QRadar
backends:
- qradar
order: 20
logsources:
apache:
product: apache
conditions:
LOGSOURCETYPENAME(devicetype): ilike '%apache%'
windows:
product: windows
conditions:
LOGSOURCETYPENAME(devicetype): 'Microsoft Windows Security Event Log'
qflow:
product: qflow
index: flows
netflow:
product: netflow
index: flows
ipfix:
product: ipfix
index: flows
flow:
category: flow
index: flows
apache:
product: apache
index: apache
conditions:
LOGSOURCETYPENAME(devicetype): '*apache*'
windows:
product: windows
index: windows
conditions:
LOGSOURCETYPENAME(devicetype): '*Microsoft Windows Security Event Log*'
qflow:
product: qflow
index: flows
netflow:
product: netflow
index: flows
ipfix:
product: ipfix
index: flows
flow:
category: flow
index: flows
fieldmappings:
EventID:
- Event ID Code
dst:
- destinationIP
dst_ip:
- destinationIP
src:
- sourceIP
src_ip:
- sourceIP
c-ip: sourceIP
cs-ip: sourceIP
c-uri: url
c-uri-extension: file_extension
c-useragent: user_agent
c-uri-query: uri_query
cs-method: Method
r-dns: FQDN
ClientIP: sourceIP
ServiceFileName: Service Name
event_id: EventID
EventID: EventID
dst: destinationip
dst_ip: destinationip
src: sourceip
src_ip: sourceip
c-ip: sourceip
cs-ip: sourceip
c-uri: URL
c-uri-extension: URL
c-useragent: user_agent
c-uri-query: uri_query
cs-method: Method
r-dns: FQDN
ClientIP: sourceip
ServiceFileName: ServiceFileName
event_data.CommandLine: Process CommandLine
CommandLine: Process CommandLine
file_hash: File Hash
hash: File Hash
#Message: search_payload
Event-ID: EventID
Event_ID: EventID
eventId: EventID
event-id: EventID
eventid: EventID
hashes: File Hash
url.query: URL
resource.URL: URL
event_data.CallingProcessName: CallingProcessName
event_data.ComputerName: Hostname/HOSTNAME
ComputerName: Hostname/HOSTNAME
event_data.DestinationHostname: Hostname/HOSTNAME
DestinationHostname: Hostname/HOSTNAME
event_data.DestinationIp: destinationip
event_data.DestinationPort: destinationport
event_data.Details: Target Details
Details: Target Details
event_data.FileName: Filename
event_data.Hashes: File Hash
Hashes: File Hash
event_data.Image: Image
event_data.ImageLoaded: LoadedImage
event_data.ImagePath: SourceImage
ImagePath: Image
event_data.Imphash: IMP Hash
Imphash: IMP Hash
event_data.ParentCommandLine: ParentCommandLine
event_data.ParentImage: ParentImage
event_data.ParentProcessName: ParentImageName
event_data.Path: File Path
Path: File Path
event_data.PipeName: PipeName
event_data.ProcessCommandLine: Process CommandLine
ProcessCommandLine: Process CommandLine
event_data.ServiceFileName: ServiceFileName
event_data.ShareName: ShareName
event_data.Signature: Signature
event_data.SourceImage: SourceImage
event_data.StartModule: StartModule
event_data.SubjectUserName: username
event_data.SubjectUserSid: SubjectUserSid
event_data.TargetFilename: Filename
TargetFilename: Filename
event_data.TargetImage: TargetImage
TargetImage: TargetImage
event_data.TicketOptions: TicketOptions
event_data.User: username
User: username
user: username
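# Illustration (hypothetical selection, not part of the shipped config): with
# the mappings above, a rule testing
#   CommandLine|contains: 'whoami'
# is emitted against the QRadar property "Process CommandLine", while src_ip
# and dst_ip resolve to sourceip and destinationip respectively.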
+124 -1
View File
@@ -343,4 +343,127 @@ fieldmappings:
id_orig_h: id.orig_h
id_orig_p: id.orig_p
id_resp_h: id.resp_h
id_resp_p: id.resp_p
# Temporary one off rule name fields
agent.version: version
c-cookie: cookie
c-ip: id.orig_h
cs-uri: uri
clientip: id.orig_h
clientIP: id.orig_h
dest_domain:
- query
- host
- server_name
dest_ip: id.resp_h
dest_port: id.resp_p
#TODO:WhatShouldThisBe?==dest:
#TODO:WhatShouldThisBe?==destination:
#TODO:WhatShouldThisBe?==Destination:
destination.hostname:
- query
- host
- server_name
DestinationAddress: id.resp_h
DestinationHostname:
- host
- query
- server_name
DestinationIp: id.resp_h
DestinationIP: id.resp_h
DestinationPort: id.resp_p
dst-ip: id.resp_h
dstip: id.resp_h
dstport: id.resp_p
Host:
- host
- query
- server_name
HostVersion: http.version
http_host:
- host
- query
- server_name
http_uri: uri
http_url: uri
http_user_agent: user_agent
http.request.url-query-params: uri
HttpMethod: method
in_url: uri
# parent_domain: # Not in open source zeek
post_url_parameter: uri
Request Url: uri
request_url: uri
request_URL: uri
RequestUrl: uri
#response: status_code
resource.url: uri
resource.URL: uri
sc_status: status_code
sender_domain:
- query
- server_name
service.response_code: status_code
source: id.orig_h
SourceAddr: id.orig_h
SourceAddress: id.orig_h
SourceIP: id.orig_h
SourceIp: id.orig_h
SourceNetworkAddress: id.orig_h
SourcePort: id.orig_p
srcip: id.orig_h
Status: status_code
status: status_code
url: uri
URL: uri
url_query: uri
url.query: uri
uri_path: uri
user_agent: user_agent
user_agent.name: user_agent
user-agent: user_agent
User-Agent: user_agent
useragent: user_agent
UserAgent: user_agent
User Agent: user_agent
web_dest:
- host
- query
- server_name
web.dest:
- host
- query
- server_name
Web.dest:
- host
- query
- server_name
web.host:
- host
- query
- server_name
Web.host:
- host
- query
- server_name
web_method: method
Web_method: method
web.method: method
Web.method: method
web_src: id.orig_h
web_status: status_code
Web_status: status_code
web.status: status_code
Web.status: status_code
web_uri: uri
web_url: uri
# Most are in ECS, but for things not using Elastic these need to be renamed
destination.ip: id.resp_h
destination.port: id.resp_p
http.request.body.content: post_body
source.domain:
- host
- query
- server_name
source.ip: id.orig_h
source.port: id.orig_p
+2 -3
View File
@@ -60,8 +60,7 @@ fieldmappings:
CallTrace: winlog.event_data.CallTrace
Channel: winlog.channel
CommandLine: process.args
ComputerName: winlog.computer_name
ContextInfo: winlog.event_data.ContextInfo
ComputerName: winlog.ComputerName
CurrentDirectory: process.working_directory
Description: winlog.event_data.Description
DestinationHostname: destination.domain
@@ -84,6 +83,7 @@ fieldmappings:
- group.id
- winlog.event_data.GroupSid
Hashes: winlog.event_data.Hashes
file_hash: winlog.event_data.Hashes
HiveName: winlog.event_data.HiveName
HostVersion: winlog.event_data.HostVersion
Image: process.executable
@@ -95,7 +95,6 @@ fieldmappings:
KeyLength: winlog.event_data.KeyLength
LogonProcessName: winlog.event_data.LogonProcessName
LogonType: winlog.event_data.LogonType
Message: winlog.event_data.Message
NewProcessName: winlog.event_data.NewProcessName
ObjectClass: winlog.event_data.ObjectClass
ObjectName: winlog.event_data.ObjectName
+1 -3
View File
@@ -59,8 +59,7 @@ fieldmappings:
CallTrace: event_data.CallTrace
Channel: winlog.channel
CommandLine: event_data.CommandLine
ComputerName: computer_name
ContextInfo: event_data.ContextInfo
ComputerName: event_data.ComputerName
CurrentDirectory: event_data.CurrentDirectory
Description: event_data.Description
DestinationHostname: event_data.DestinationHostname
@@ -86,7 +85,6 @@ fieldmappings:
KeyLength: event_data.KeyLength
LogonProcessName: event_data.LogonProcessName
LogonType: event_data.LogonType
Message: event_data.Message
NewProcessName: event_data.NewProcessName
ObjectClass: event_data.ObjectClass
ObjectName: event_data.ObjectName
+2 -4
View File
@@ -59,8 +59,7 @@ fieldmappings:
CallTrace: winlog.event_data.CallTrace
Channel: winlog.channel
CommandLine: winlog.event_data.CommandLine
ComputerName: winlog.computer_name
ContextInfo: winlog.event_data.ContextInfo
ComputerName: winlog.ComputerName
CurrentDirectory: winlog.event_data.CurrentDirectory
Description: winlog.event_data.Description
DestinationHostname: winlog.event_data.DestinationHostname
@@ -88,7 +87,6 @@ fieldmappings:
KeyLength: winlog.event_data.KeyLength
LogonProcessName: winlog.event_data.LogonProcessName
LogonType: winlog.event_data.LogonType
Message: winlog.event_data.Message
NewProcessName: winlog.event_data.NewProcessName
ObjectClass: winlog.event_data.ObjectClass
ObjectName: winlog.event_data.ObjectName
@@ -137,4 +135,4 @@ fieldmappings:
OnexEnabled: winlog.event_data.OnexEnabled
PHYType: winlog.event_data.PHYType
ProfileName: winlog.event_data.ProfileName
SSID: winlog.event_data.SSID
+171 -99
View File
@@ -13,22 +13,49 @@
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re, json
import os
import sys
import re
import json
import xml.etree.ElementTree as xml
from ..config.mapping import (
from sigma.config.mapping import (
SimpleFieldMapping, MultiFieldMapping, ConditionalFieldMapping
)
from ..parser.condition import SigmaAggregationParser
from ..parser.exceptions import SigmaParseError
from ..parser.modifiers.type import SigmaRegularExpressionModifier
from .base import SingleTextQueryBackend
from sigma.parser.condition import SigmaAggregationParser
from sigma.parser.modifiers.type import SigmaRegularExpressionModifier
from sigma.backends.base import SingleTextQueryBackend
from sigma.parser.modifiers.base import SigmaTypeModifier
from sigma.parser.modifiers.transform import SigmaContainsModifier, SigmaStartswithModifier, SigmaEndswithModifier
from .data import sysmon_schema
from .exceptions import NotSupportedError
class AzureLogAnalyticsBackend(SingleTextQueryBackend):
class DeepFieldMappingMixin(object):
def fieldNameMapping(self, fieldname, value):
if isinstance(fieldname, str):
get_config = self.sigmaconfig.fieldmappings.get(fieldname)
if not get_config and '|' in fieldname:
fieldname = fieldname.split('|', 1)[0]
get_config = self.sigmaconfig.fieldmappings.get(fieldname)
if isinstance(get_config, ConditionalFieldMapping):
condition = self.sigmaconfig.fieldmappings.get(fieldname).conditions
for key, item in self.logsource.items():
if condition.get(key) and condition.get(key, {}).get(item):
new_fieldname = condition.get(key, {}).get(item)
if any(new_fieldname):
return super().fieldNameMapping(new_fieldname[0], value)
return super().fieldNameMapping(fieldname, value)
def generate(self, sigmaparser):
self.logsource = sigmaparser.parsedyaml.get("logsource", {})
return super().generate(sigmaparser)
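# Illustration (hypothetical config entry and rule, not part of this file):
# given a conditional mapping such as
#   Image:
#     category=process_creation: NewProcessName
#     default: Image
# and a rule whose logsource declares category: process_creation, the mixin
# above resolves both "Image" and a piped variant like "Image|endswith" to
# "NewProcessName" before delegating to the base fieldNameMapping().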
class AzureLogAnalyticsBackend(DeepFieldMappingMixin, SingleTextQueryBackend):
"""Converts Sigma rule into Azure Log Analytics Queries."""
identifier = "ala"
active = True
@@ -43,8 +70,7 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
)
config_required = False
reEscape = re.compile('("|(?<!\\\\)\\\\(?![*?\\\\]))')
reClear = None
reEscape = re.compile('(\\\|"|(?<!)(?![*?]))')
andToken = " and "
orToken = " or "
notToken = "not "
@@ -57,13 +83,17 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
mapExpression = "%s == %s"
mapListsSpecialHandling = True
mapListValueExpression = "%s in %s"
_WIN_SECURITY_EVENT_MAP = {
"Image": "NewProcessName",
"ParentImage": "ParentProcessName",
"User": "SubjectUserName",
typedValueExpression = {
SigmaRegularExpressionModifier: "matches regex \"(?i)%s\"",
SigmaContainsModifier: "contains \"%s\""
}
# _WIN_SECURITY_EVENT_MAP = {
# "Image": "NewProcessName",
# "ParentImage": "ParentProcessName",
# "User": "SubjectUserName",
# }
def __init__(self, *args, **kwargs):
"""Initialize field mappings."""
super().__init__(*args, **kwargs)
@@ -77,14 +107,9 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
self._agg_var = None
self._has_logsource_event_cond = False
if not self.sysmon and not self.sigmaconfig.config:
self._field_map = self._WIN_SECURITY_EVENT_MAP
self._field_map = {}#self._WIN_SECURITY_EVENT_MAP
else:
self._field_map = {}
self.typedValueExpression[SigmaRegularExpressionModifier] = "matches regex \"%s\""
def id_mapping(self, src):
"""Identity mapping, source == target field name"""
return src
def map_sysmon_schema(self, eventid):
schema_keys = []
@@ -105,48 +130,36 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
def default_value_mapping(self, val):
op = "=="
if isinstance(val, str):
if "*" in val[1:-1]: # value contains * inside string - use regex match
if "*" in val[1:-1]: # value contains * inside string - use regex match
op = "matches regex"
val = re.sub('([".^$]|\\\\(?![*?]))', '\\\\\g<1>', val)
val = re.sub('\\*', '.*', val)
if "\\" in val:
return "%s \"(?i)%s\"" % (op, val)
return "%s \"(?i)%s\"" % (op, val)
elif val.startswith("*") or val.endswith("*"):
op = "contains"
val = re.sub('([".^$]|(?![*?]))', '\g<1>', val)
val = re.sub('\\*', '', val)
val = re.sub('\\?', '.', val)
if "\\" in val:
return "%s @\"%s\"" % (op, val)
else: # value possibly only starts and/or ends with *, use prefix/postfix match
if val.endswith("*") and val.startswith("*"):
op = "contains"
val = self.cleanValue(val[1:-1])
elif val.endswith("*"):
op = "startswith"
val = self.cleanValue(val[:-1])
elif val.startswith("*"):
op = "endswith"
val = self.cleanValue(val[1:])
if "\\" in val:
return "%s @\"%s\"" % (op, val)
# if "\\" in val:
# return "%s @\"%s\"" % (op, val)
return "%s \"%s\"" % (op, val)
# elif "\\" in val:
# return "%s @\"%s\"" % (op, val)
return "%s \"%s\"" % (op, val)
def generate(self, sigmaparser):
self.table = None
try:
self.category = sigmaparser.parsedyaml['logsource'].setdefault('category', None)
self.product = sigmaparser.parsedyaml['logsource'].setdefault('product', None)
self.service = sigmaparser.parsedyaml['logsource'].setdefault('service', None)
except KeyError:
self.category = None
self.product = None
self.service = None
self.category = sigmaparser.parsedyaml['logsource'].setdefault('category', None)
self.product = sigmaparser.parsedyaml['logsource'].setdefault('product', None)
self.service = sigmaparser.parsedyaml['logsource'].setdefault('service', None)
detection = sigmaparser.parsedyaml.get("detection", {})
is_parent_cmd = False
if "keywords" in detection.keys():
return super().generate(sigmaparser)
if self.category == "process_creation":
self.table = "SysmonEvent"
self.table = "SecurityEvent"
self.eventid = "1"
elif self.service == "security":
self.table = "SecurityEvent"
@@ -154,6 +167,12 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
self.table = "SysmonEvent"
elif self.service == "powershell":
self.table = "Event"
elif self.service == "office365":
self.table = "OfficeActivity"
elif self.service == "azuread":
self.table = "AuditLogs"
elif self.service == "azureactivity":
self.table = "AzureActivity"
else:
if self.service:
if "-" in self.service:
@@ -181,8 +200,8 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
elif self.sysmon:
parse_string = self.map_sysmon_schema(self.eventid)
before = "%s | parse EventData with * %s | where " % (self.table, parse_string)
elif self.category == "process_creation" and not self._has_logsource_event_cond:
before = "%s | where EventID == \"%s\" | where " % (self.table, self.eventid)
# elif self.category == "process_creation" and not self._has_logsource_event_cond:
# before = "%s | where EventID == \"%s\" | where " % (self.table, self.eventid)
else:
before = "%s | where " % self.table
return before
@@ -193,6 +212,7 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
and creates an appropriate table reference.
"""
key, value = node
key = self.fieldNameMapping(key, value)
if type(value) == list: # handle map items with values list like multiple OR-chained conditions
return "(" + self.generateORNode(
[(key, v) for v in value]
@@ -207,17 +227,26 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
self.table = "SecurityEvent"
elif self.service == "system":
self.table = "Event"
elif type(value) in (str, int): # default value processing
mapping = (key, self.default_value_mapping)
return self.mapExpression % (key, value)
elif type(value) in [SigmaTypeModifier, SigmaContainsModifier, SigmaRegularExpressionModifier, SigmaStartswithModifier, SigmaEndswithModifier]:
return self.generateMapItemTypedNode(key, value)
elif type(value) in (str, int): # default value processing
#default_filters = ["endswith", "contains", "startswith", "re"]
# if any([item for item in default_filters if item in key]):
# key = re.sub(key, default_filters, "")
# return self.regexExpression % (key, self.cleanValue(value))
# else:
# value_mapping = self.default_value_mapping
value_mapping = self.default_value_mapping
mapping = (key, value_mapping)
if len(mapping) == 1:
mapping = mapping[0]
if type(mapping) == str:
return mapping
elif callable(mapping):
conds = mapping(key, value)
return self.generateSubexpressionNode(
self.generateANDNode(
[cond for cond in mapping(key, value)]
[cond for cond in mapping(key, self.cleanValue(value))]
)
)
elif len(mapping) == 2:
@@ -226,12 +255,29 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
if type(mapitem) == str:
result.append(mapitem)
elif callable(mapitem):
result.append(mapitem(val))
result.append(mapitem(self.cleanValue(val)))
return "{} {}".format(*result)
else:
raise TypeError("Backend does not support map values of type " + str(type(value)))
elif type(value) == list:
return self.generateMapItemListNode(key, value)
return super().generateMapItemNode(node)
elif value is None:
return self.nullExpression % (key, )
else:
raise TypeError("Backend does not support map values of type " + str(type(value)))
def generateMapItemTypedNode(self, fieldname, value):
return "%s %s" % (fieldname, self.generateTypedValueNode(value))
def generateTypedValueNode(self, node):
try:
val = str(node)
if "*" in val:
val = re.sub('\\*', '.*', val)
return self.typedValueExpression[type(node)] % (val)
except KeyError:
raise NotImplementedError("Type modifier '{}' is not supported by backend".format(node.identifier))
def generateAggregation(self, agg):
if agg is None:
@@ -269,36 +315,6 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
)
)
def generateAfter(self, parsed):
del parsed
if self._fields:
all_fields = list(self._fields)
if self._agg_var:
all_fields = set(all_fields + [self._agg_var])
project_fields = self._map_fields(all_fields)
project_list = ", ".join(str(fld) for fld in set(project_fields))
return " | project " + project_list
return ""
def _map_fields(self, fields):
for field in fields:
mapped_field = self._map_field(field)
if isinstance(mapped_field, str):
yield mapped_field
elif isinstance(mapped_field, list):
for subfield in mapped_field:
yield subfield
def _map_field(self, fieldname):
mapping = self.sigmaconfig.fieldmappings.get(fieldname)
if isinstance(mapping, ConditionalFieldMapping):
fieldname = self._map_conditional_field(fieldname)
elif isinstance(mapping, MultiFieldMapping):
fieldname = mapping.resolve_fieldname(fieldname, self._parser)
elif isinstance(mapping, SimpleFieldMapping):
fieldname = mapping.resolve_fieldname(fieldname, self._parser)
return fieldname
def _map_conditional_field(self, fieldname):
mapping = self.sigmaconfig.fieldmappings.get(fieldname)
# if there is a conditional mapping for this fieldname
@@ -325,35 +341,89 @@ class AzureAPIBackend(AzureLogAnalyticsBackend):
def __init__(self, *args, **kwargs):
"""Initialize field mappings"""
super().__init__(*args, **kwargs)
self.techniques = self._load_mitre_file("techniques")
def create_rule(self, config):
tags = config.get("tags", [])
def find_technique(self, key_ids):
for key_id in set(key_ids):
if not key_id:
continue
for technique in self.techniques:
if key_id == technique.get("technique_id", ""):
yield technique
def _load_mitre_file(self, mitre_type):
try:
backend_dir = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", "config", "mitre"))
path = os.path.join(backend_dir, "{}.json".format(mitre_type))
with open(path) as config_file:
config = json.load(config_file)
return config
except (IOError, OSError) as e:
print("Failed to open {} configuration file '%s': %s".format(path, str(e)), file=sys.stderr)
return []
except json.JSONDecodeError as e:
print("Failed to parse {} configuration file '%s' as valid YAML: %s" % (path, str(e)), file=sys.stderr)
return []
def skip_tactics_or_techniques(self, src_technics, src_tactics):
tactics = set()
technics = set()
local_storage_techniques = {item["technique_id"]: item for item in self.find_technique(src_technics)}
for key_id in src_technics:
src_tactic = local_storage_techniques.get(key_id, {}).get("tactic")
if not src_tactic:
continue
src_tactic = set(src_tactic)
for item in src_tactics:
if item in src_tactic:
technics.add(key_id)
tactics.add(item)
return sorted(tactics), sorted(technics)
def parse_severity(self, old_severity):
if old_severity.lower() == "critical":
return "high"
return old_severity
def get_tactics_and_techniques(self, tags):
tactics = list()
technics = list()
for tag in tags:
tag = tag.replace("attack.", "")
if re.match("[tT][0-9]{4}", tag):
if re.match("[t][0-9]{4}", tag, re.IGNORECASE):
technics.append(tag.title())
else:
if "_" in tag:
tag_list = tag.split("_")
tag_list = [item.title() for item in tag_list]
tactics.append("".join(tag_list))
else:
tactics.append(tag.title())
tag = tag.replace("_", " ")
tag = tag.title()
tactics.append(tag)
return tactics, technics
def create_rule(self, config):
tags = config.get("tags", [])
tactics, technics = self.get_tactics_and_techniques(tags)
tactics, technics = self.skip_tactics_or_techniques(technics, tactics)
tactics = list(map(lambda s: s.replace(" ", ""), tactics))
rule = {
"displayName": "{} by {}".format(config.get("title"), config.get('author')),
"description": "{} {}".format(config.get("description"), "Technique: {}.".format(",".join(technics))),
"severity": config.get("level", "medium"),
"severity": self.parse_severity(config.get("level", "medium")),
"enabled": True,
"query": config.get("translation"),
"queryFrequency": "12H",
"queryPeriod": "12H",
"triggerOperator": "GreaterThan",
"triggerThreshold": 1,
"triggerThreshold": 0,
"suppressionDuration": "12H",
"suppressionEnabled": False,
"suppressionEnabled": True,
"tactics": tactics
}
return json.dumps(rule)
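# Example of the payload shape create_rule() serializes (illustrative values):
# {
#   "displayName": "Suspicious Activity by Some Author",
#   "description": "... Technique: T1059.",
#   "severity": "high",  # "critical" is downgraded by parse_severity
#   "enabled": true,
#   "query": "<translated query>",
#   "queryFrequency": "12H",
#   "queryPeriod": "12H",
#   "triggerOperator": "GreaterThan",
#   "triggerThreshold": 0,
#   "suppressionDuration": "12H",
#   "suppressionEnabled": true,
#   "tactics": ["Execution"]
# }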
@@ -365,3 +435,5 @@ class AzureAPIBackend(AzureLogAnalyticsBackend):
configs.update({"translation": translation})
rule = self.create_rule(configs)
return rule
else:
raise NotSupportedError("No table could be determined from Sigma rule")
+1 -1
View File
@@ -222,7 +222,7 @@ class ArcSightESMBackend(SingleTextQueryBackend):
elif isinstance(value, str) and value.endswith("*"):
return self.startsWithExpression % (key, self.generateValueNode(self.CleanNode(value)))
else:
return self.generateValueNode(value)
return self.mapExpression % (key, self.generateValueNode(value))
elif isinstance(value, list):
new_value = list()
for item in value:
+1
View File
@@ -20,6 +20,7 @@ import sigma
import yaml
import re
from sigma.backends.exceptions import NotSupportedError
from .mixins import RulenameCommentMixin, QuoteCharMixin
from sigma.parser.modifiers.base import SigmaTypeModifier
+3 -4
View File
@@ -2,7 +2,7 @@ import re
import requests
import json
import os
from ..config.eventdict import event
from sigma.config.eventdict import event
from fnmatch import fnmatch
from sigma.backends.base import SingleTextQueryBackend
@@ -83,7 +83,7 @@ class CarbonBlackQueryBackend(CarbonBlackWildcardHandlingMixin, SingleTextQueryB
if val.startswith("*"):
val = val.replace("*", "",1)
if val.startswith("\\"):
val = val.replace("\\", "", 1)
val = val.replace("\\", "", 1)
if val.startswith("*\\"):
val = val.replace("*\\", "*")
if val.startswith("*/"):
@@ -108,7 +108,7 @@ class CarbonBlackQueryBackend(CarbonBlackWildcardHandlingMixin, SingleTextQueryB
elif type(new_value) is list:
for index, vl in enumerate(new_value):
new_value[index] = self.cleanIPRange(vl)
return new_value
def generateValueNode(self, node):
@@ -131,7 +131,6 @@ class CarbonBlackQueryBackend(CarbonBlackWildcardHandlingMixin, SingleTextQueryB
else:
transformed_fieldname = self.fieldNameMapping(fieldname, value)
if(transformed_fieldname == "ipaddr"):
print("OK")
value = self.cleanIPRange(value)
if self.mapListsSpecialHandling == False and type(value) in (str, int, list) or self.mapListsSpecialHandling == True and type(value) in (str, int):
#return self.mapExpression % (transformed_fieldname, self.generateNode(value))
+1 -1
View File
@@ -25,7 +25,7 @@ from sigma.tools import getAllSubclasses, getClassDict
def getBackendList():
"""Return list of backend classes"""
path = os.path.dirname(__file__)
return frozenset(getAllSubclasses(path, "backends", BaseBackend))
return getAllSubclasses(path, "backends", BaseBackend)
def getBackendDict():
return getClassDict(getBackendList())
+74 -9
View File
@@ -23,12 +23,39 @@ from random import randrange
import sigma
import yaml
from sigma.parser.modifiers.type import SigmaRegularExpressionModifier
from sigma.parser.modifiers.type import SigmaRegularExpressionModifier, SigmaTypeModifier
from sigma.parser.condition import ConditionOR, ConditionAND, NodeSubexpression
from sigma.config.mapping import ConditionalFieldMapping
from .base import BaseBackend, SingleTextQueryBackend
from .mixins import RulenameCommentMixin, MultiRuleOutputMixin
from .exceptions import NotSupportedError
class DeepFieldMappingMixin(object):
def fieldNameMapping(self, fieldname, value):
if isinstance(fieldname, str):
get_config = self.sigmaconfig.fieldmappings.get(fieldname)
if not get_config and '|' in fieldname:
fieldname = fieldname.split('|', 1)[0]
get_config = self.sigmaconfig.fieldmappings.get(fieldname)
if isinstance(get_config, ConditionalFieldMapping):
condition = self.sigmaconfig.fieldmappings.get(fieldname).conditions
for key, item in self.logsource.items():
if condition.get(key) and condition.get(key, {}).get(item):
new_fieldname = condition.get(key, {}).get(item)
if any(new_fieldname):
return super().fieldNameMapping(new_fieldname[0], value)
return super().fieldNameMapping(fieldname, value)
def generate(self, sigmaparser):
self.logsource = sigmaparser.parsedyaml.get("logsource", {})
return super().generate(sigmaparser)
class ElasticsearchWildcardHandlingMixin(object):
"""
Determine field mapping to keyword subfields depending on existence of wildcards in search values. Further,
@@ -86,6 +113,31 @@ class ElasticsearchWildcardHandlingMixin(object):
else:
return False
def generateMapItemNode(self, node):
fieldname, value = node
if fieldname.lower().find("hash") != -1:
if isinstance(value, list):
res = []
for item in value:
try:
res.extend([item.lower(), item.upper()])
except AttributeError: # not a string (something that doesn't support upper/lower casing)
res.append(item)
value = res
elif isinstance(value, str):
value = [value.upper(), value.lower()]
transformed_fieldname = self.fieldNameMapping(fieldname, value)
if self.mapListsSpecialHandling == False and type(value) in (str, int, list) or self.mapListsSpecialHandling == True and type(value) in (str, int):
return self.mapExpression % (transformed_fieldname, self.generateNode(value))
elif type(value) == list:
return self.generateMapItemListNode(transformed_fieldname, value)
elif isinstance(value, SigmaTypeModifier):
return self.generateMapItemTypedNode(transformed_fieldname, value)
elif value is None:
return self.nullExpression % (transformed_fieldname, )
else:
raise TypeError("Backend does not support map values of type " + str(type(value)))
def fieldNameMapping(self, fieldname, value, *agg_option):
"""
Decide whether to use a keyword field or analyzed field. Using options on fields to make into keywords OR not and the field naming of keyword.
@@ -162,6 +214,8 @@ class ElasticsearchWildcardHandlingMixin(object):
Adds the beginning and ending '/' to make regex query if still determined that it should be a regex
"""
if value and not value == 'null' and not re.match(r'^/.*/$', value) and (re.search('[a-zA-Z]', value) and not re.match(self.uuid_regex, value) or self.containsWildcard(value)): # re.search for alpha is fastest:
# Turn a single trailing '\\' into a non-escaped one (i.e. '\\*')
#value = re.sub( r"((?<!\\)(\\))\*$", "\g<1>\\*", value )
# Make upper/lower
value = re.sub( r"[A-Za-z]", lambda x: "[" + x.group( 0 ).upper() + x.group( 0 ).lower() + "]", value )
# Turn `*` into wildcard, only if odd number of '\'(because this would mean already escaped)
@@ -180,7 +234,7 @@ class ElasticsearchWildcardHandlingMixin(object):
return { 'is_regex': False, 'value': value }
class ElasticsearchQuerystringBackend(ElasticsearchWildcardHandlingMixin, SingleTextQueryBackend):
class ElasticsearchQuerystringBackend(DeepFieldMappingMixin, ElasticsearchWildcardHandlingMixin, SingleTextQueryBackend):
"""Converts Sigma rule into Elasticsearch query string. Only searches, no aggregations."""
identifier = "es-qs"
active = True
@@ -244,7 +298,7 @@ class ElasticsearchQuerystringBackend(ElasticsearchWildcardHandlingMixin, Single
else:
return super().generateSubexpressionNode(node)
class ElasticsearchDSLBackend(RulenameCommentMixin, ElasticsearchWildcardHandlingMixin, BaseBackend):
class ElasticsearchDSLBackend(DeepFieldMappingMixin, RulenameCommentMixin, ElasticsearchWildcardHandlingMixin, BaseBackend):
"""ElasticSearch DSL backend"""
identifier = 'es-dsl'
active = True
@@ -579,7 +633,8 @@ class KibanaBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin):
if self.output_type == "import": # output format that can be imported via Kibana UI
for item in self.kibanaconf: # JSONize kibanaSavedObjectMeta.searchSourceJSON
item['_source']['kibanaSavedObjectMeta']['searchSourceJSON'] = json.dumps(item['_source']['kibanaSavedObjectMeta']['searchSourceJSON'])
return json.dumps(self.kibanaconf, indent=2)
if self.kibanaconf:
return json.dumps(self.kibanaconf, indent=2)
elif self.output_type == "curl":
for item in self.indexsearch:
return item
@@ -908,7 +963,7 @@ class XPackWatcherBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin)
raise NotImplementedError("Output type '%s' not supported" % self.output_type)
return result
class ElastalertBackend(MultiRuleOutputMixin):
class ElastalertBackend(DeepFieldMappingMixin, MultiRuleOutputMixin):
"""Elastalert backend"""
active = True
supported_alert_methods = {'email', 'http_post'}
@@ -1202,12 +1257,14 @@ class ElasticSearchRuleBackend(ElasticsearchQuerystringBackend):
tags = configs.get("tags", [])
tactics_list = list()
technics_list = list()
new_tags = list()
for tag in tags:
tag = tag.replace("attack.", "")
if re.match("[t][0-9]{4}", tag, re.IGNORECASE):
tech = self.find_technique(tag.title())
if tech:
new_tags.append(tag.title())
technics_list.append(tech)
else:
if "_" in tag:
@@ -1215,22 +1272,29 @@ class ElasticSearchRuleBackend(ElasticsearchQuerystringBackend):
tag_list = [item.title() for item in tag_list]
tact = self.find_tactics(key_name=" ".join(tag_list))
if tact:
new_tags.append(" ".join(tag_list))
tactics_list.append(tact)
elif re.match("[ta][0-9]{4}", tag, re.IGNORECASE):
tact = self.find_tactics(key_id=tag.upper())
if tact:
new_tags.append(tag.upper())
tactics_list.append(tact)
else:
tact = self.find_tactics(key_name=tag.title())
if tact:
new_tags.append(tag.title())
tactics_list.append(tact)
threat = self.create_threat_description(tactics_list=tactics_list, techniques_list=technics_list)
rule_id = configs.get("title", "").lower().replace(" ", "_")
rule_name = configs.get("title", "").lower()
rule_id = re.sub(re.compile('[()*+!,\[\].\s"]'), "_", rule_name)
risk_score = self.map_risk_score(configs.get("level", "medium"))
references = configs.get("reference")
if references is None:
references = configs.get("references")
rule = {
"description": configs.get("description", ""),
"enabled": True,
"false_positives": configs.get('falsepositives'),
"false_positives": configs.get('falsepositives', "Unkown"),
"filters": [],
"from": "now-360s",
"immutable": False,
@@ -1243,15 +1307,16 @@ class ElasticSearchRuleBackend(ElasticsearchQuerystringBackend):
"risk_score": risk_score,
"name": configs.get("title", ""),
"query":configs.get("translation"),
"references": configs.get("references"),
"meta": {
"from": "1m"
},
"severity": configs.get("level", "medium"),
"tags": tags,
"tags": new_tags,
"to": "now",
"type": "query",
"threat": threat,
"version": 1
}
if references:
rule.update({"references": references})
return json.dumps(rule)
+160
View File
@@ -0,0 +1,160 @@
# Output backends for sigmac
# Copyright 2016-2018 Thomas Patzke, Florian Roth, Roey
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
from sigma.parser.modifiers.type import SigmaRegularExpressionModifier
from sigma.parser.condition import SigmaAggregationParser
from .base import SingleTextQueryBackend
from .mixins import MultiRuleOutputMixin
class HumioBackend(SingleTextQueryBackend):
"""Converts Sigma rule into Humio query."""
identifier = "humio"
active = True
reEscape = re.compile('("|(?<!\\\\)\\\\(?![*?\\\\]))')
reClear = None
andToken = " "
orToken = " or "
notToken = "!"
subExpression = "%s"
listExpression = "%s"
listSeparator = " "
valueExpression = "\"%s\""
nullExpression = "NOT %s=\"*\""
notNullExpression = "%s=\"*\""
mapExpression = "%s=%s"
regexExpression = "regex(\"%s=(\\\"%s\\\")\")"
mapListsSpecialHandling = True
mapListValueExpression = "%s IN %s"
typedValueExpression = {
SigmaRegularExpressionModifier: "/%s/"
}
def generateMapItemNode(self, node):
key, value = node
if isinstance(value, SigmaRegularExpressionModifier):# or isinstance(value, str) and "*" in value :
return self.regexExpression % (key, self.cleanValue(value))
else:
return super().generateMapItemNode(node)
def generateNOTNode(self, node):
generated = self.generateNode(node.item)
if generated is not None:
return "%s(%s)" % (self.notToken, generated)
else:
return None
def generateANDNode(self, node):
generated = [self.generateNode(val) for val in node]
filtered = [g for g in generated if g is not None]
if filtered:
if self.sort_condition_lists:
filtered = sorted(filtered)
if any([item for item in filtered if "regex" in item]):
res = ""
for item in filtered:
if item.startswith("regex"):
if res.endswith(" | "):
res = res.rstrip(" | ")
res += " | %s | " % item.strip(" | ")
else:
res += item
return res.strip(" | ")
return self.andToken.join(filtered)
else:
return None
def generateORNode(self, node):
generated = [self.generateNode(val) for val in node]
filtered = [g.strip(" | ") for g in generated if g is not None]
if filtered:
if self.sort_condition_lists:
filtered = sorted(filtered)
if any([item for item in filtered if "regex" in item]):
res = ""
for item in filtered:
if item.startswith("regex"):
if res.endswith(" | "):
res = res.rstrip(" | ")
res += " | %s | " % item.strip(" | ")
else:
res += item
return res.strip(" | ")
return self.orToken.join(filtered)
else:
return None
def cleanValue(self, val):
if isinstance(val, SigmaRegularExpressionModifier):
val = val.value
if "\\" in val:
val = re.sub(r"\\", r"\\\\\\", val)
# if (val.startswith("*") or val.endswith("*")) and "\\" in val:
# val = re.sub(r"\\", r"\\\\\\", val)
return super().cleanValue(val)
def generateMapItemListNode(self, key, value):
if isinstance(value, SigmaRegularExpressionModifier):
key_mapped = self.fieldNameMapping(key, value)
return {'regexp': {key_mapped: str(value)}}
# if any([item for item in value if "*" in item]):
# return (" | " + " | ".join([self.regexExpression % (key, self.cleanValue(item)) for item in value]) + " | ")
if not set([type(val) for val in value]).issubset({str, int}):
raise TypeError("List values must be strings or numbers")
return (" or ".join(['%s=%s' % (key, self.generateValueNode(item)) for item in value]))
def generateAggregation(self, agg):
if agg == None:
return ""
if agg.aggfunc == SigmaAggregationParser.AGGFUNC_NEAR:
raise NotImplementedError("The 'near' aggregation operator is not yet implemented for this backend")
if agg.groupfield == None:
if agg.aggfunc_notrans == 'count':
if agg.aggfield == None :
return " | val := count() | val %s %s" % (agg.cond_op, agg.condition)
else:
agg.aggfunc_notrans = 'dc'
return " | count(field=%s, distinct=true, as=val) | val %s %s" % (agg.aggfield or "", agg.cond_op, agg.condition)
else:
if agg.aggfunc_notrans == 'count':
if agg.aggfield == None :
return " | val := count(field=%s) | val %s %s" % (agg.groupfield or "", agg.cond_op, agg.condition)
else:
agg.aggfunc_notrans = 'dc'
return " | groupby(field=%s, function=count(field=%s, distinct=true, as=val)) | val %s %s" % (agg.groupfield or "", agg.aggfield or "", agg.cond_op, agg.condition)
def generate(self, sigmaparser):
"""Method is called for each sigma rule and receives the parsed rule (SigmaParser)"""
for parsed in sigmaparser.condparsed:
query = self.generateQuery(parsed)
#before = self.generateBefore(parsed)
#after = self.generateAfter(parsed)
result = ""
# if before is not None:
# result = before
if query is not None:
result += query
# if after is not None:
# result += after
if result.endswith(" | "):
result = result.strip(" | ")
return result
+609 -609
View File
@@ -1,609 +1,609 @@
# LimaCharlie backend for sigmac created by LimaCharlie.io
# Copyright 2019 Refraction Point, Inc
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import yaml
from collections import namedtuple
from .base import BaseBackend
from sigma.parser.modifiers.base import SigmaTypeModifier
from sigma.parser.modifiers.type import SigmaRegularExpressionModifier
# A few helper functions for cases where field mapping cannot be done
# as easily one by one, or can be done more efficiently.
def _windowsEventLogFieldName(fieldName):
if 'EventID' == fieldName:
return 'Event/System/EventID'
return 'Event/EventData/%s' % (fieldName,)
def _mapProcessCreationOperations(node):
# Here we fix some common pitfalls found in rules
# in a consistent fashion (already processed into a D&R rule).
# First fixup is looking for a specific path prefix
# based on a specific drive letter. There are many cases
# where the driver letter can change or where the early
# boot process refers to it as "\Device\HarddiskVolume1\".
if ("starts with" == node["op"] and
"event/FILE_PATH" == node["path"] and
node["value"].lower().startswith("c:\\")):
node["op"] = "matches"
node["re"] = "^(?:(?:.:)|(?:\\\\Device\\\\HarddiskVolume.))\\\\%s" % (re.escape(node["value"][3:]),)
del(node["value"])
return node
# We support many different log sources so we keep different mapping depending
# on the log source and category.
# The mapping key is product/category/service.
# The mapping value is tuple like:
# - top-level parameters
# - pre-condition is a D&R rule node filtering relevant events.
# - field mappings is a dict with a mapping or a callable to convert the field name.
# Individual mapping values can also be callables taking (fieldname, value) and returning a new fieldname and value.
# - isAllStringValues is a bool indicating whether all values should be converted to string.
# - keywordField is the field name to alias for keywords if supported or None if not.
# - postOpMapper is a callback that can modify an operation once it has been generated.
SigmaLCConfig = namedtuple('SigmaLCConfig', [
'topLevelParams',
'preConditions',
'fieldMappings',
'isAllStringValues',
'keywordField',
'postOpMapper',
])
_allFieldMappings = {
"windows/process_creation/": SigmaLCConfig(
topLevelParams = {
"events": [
"NEW_PROCESS",
"EXISTING_PROCESS",
]
},
preConditions = {
"op": "is windows",
},
fieldMappings = {
"CommandLine": "event/COMMAND_LINE",
"Image": "event/FILE_PATH",
"ParentImage": "event/PARENT/FILE_PATH",
"ParentCommandLine": "event/PARENT/COMMAND_LINE",
"User": "event/USER_NAME",
"OriginalFileName": "event/ORIGINAL_FILE_NAME",
# Custom field names coming from somewhere unknown.
"NewProcessName": "event/FILE_PATH",
"ProcessCommandLine": "event/COMMAND_LINE",
# Another one-off command line.
"Command": "event/COMMAND_LINE",
},
isAllStringValues = False,
keywordField = "event/COMMAND_LINE",
postOpMapper = _mapProcessCreationOperations
),
"windows//": SigmaLCConfig(
topLevelParams = {
"target": "log",
"log type": "wel",
},
preConditions = None,
fieldMappings = _windowsEventLogFieldName,
isAllStringValues = True,
keywordField = None,
postOpMapper = None
),
"windows_defender//": SigmaLCConfig(
topLevelParams = {
"target": "log",
"log type": "wel",
},
preConditions = None,
fieldMappings = _windowsEventLogFieldName,
isAllStringValues = True,
keywordField = None,
postOpMapper = None
),
"dns//": SigmaLCConfig(
topLevelParams = {
"event": "DNS_REQUEST",
},
preConditions = None,
fieldMappings = {
"query": "event/DOMAIN_NAME",
},
isAllStringValues = False,
keywordField = None,
postOpMapper = None
),
"linux//": SigmaLCConfig(
topLevelParams = {
"events": [
"NEW_PROCESS",
"EXISTING_PROCESS",
]
},
preConditions = {
"op": "is linux",
},
fieldMappings = {
"exe": "event/FILE_PATH",
"type": None,
},
isAllStringValues = False,
keywordField = 'event/COMMAND_LINE',
postOpMapper = None
),
"unix//": SigmaLCConfig(
topLevelParams = {
"events": [
"NEW_PROCESS",
"EXISTING_PROCESS",
]
},
preConditions = {
"op": "is linux",
},
fieldMappings = {
"exe": "event/FILE_PATH",
"type": None,
},
isAllStringValues = False,
keywordField = 'event/COMMAND_LINE',
postOpMapper = None
),
"netflow//": SigmaLCConfig(
topLevelParams = {
"event": "NETWORK_CONNECTIONS",
},
preConditions = None,
fieldMappings = {
"destination.port": "event/NETWORK_ACTIVITY/DESTINATION/PORT",
"source.port": "event/NETWORK_ACTIVITY/SOURCE/PORT",
},
isAllStringValues = False,
keywordField = None,
postOpMapper = None
),
"/proxy/": SigmaLCConfig(
topLevelParams = {
"event": "HTTP_REQUEST",
},
preConditions = None,
fieldMappings = {
"c-uri|contains": "event/URL",
"c-uri": "event/URL",
"URL": "event/URL",
"cs-uri-query": "event/URL",
"cs-uri-stem": "event/URL",
},
isAllStringValues = False,
keywordField = None,
postOpMapper = None
),
}
class LimaCharlieBackend(BaseBackend):
"""Converts Sigma rule into LimaCharlie D&R rules. Contributed by LimaCharlie. https://limacharlie.io"""
identifier = "limacharlie"
active = True
config_required = False
default_config = ["limacharlie"]
def generate(self, sigmaparser):
# Take the log source information and figure out which set of mappings to use.
ruleConfig = sigmaparser.parsedyaml
ls_rule = ruleConfig['logsource']
try:
category = ls_rule['category']
except KeyError:
category = ""
try:
product = ls_rule['product']
except KeyError:
product = ""
# try:
# service = ls_rule['service']
# except KeyError:
# service = ""
# If there is a timeframe component, we do not
# support it for now.
if ruleConfig.get( 'detection', {} ).get( 'timeframe', None ) is not None:
raise NotImplementedError("Timeframes are not supported by backend.")
# Don't use service for now, most Windows Event Logs
# uses a different service with no category, since we
# treat all Windows Event Logs together we can ignore
# the service.
service = ""
# See if we have a definition for the source combination.
mappingKey = "%s/%s/%s" % (product, category, service)
topFilter, preCond, mappings, isAllStringValues, keywordField, postOpMapper = _allFieldMappings.get(mappingKey, tuple([None, None, None, None, None, None]))
if mappings is None:
raise NotImplementedError("Log source %s/%s/%s not supported by backend." % (product, category, service))
# Field name conversions.
self._fieldMappingInEffect = mappings
# LC event type pre-selector for the type of data.
self._preCondition = preCond
# Are all the values treated as strings?
self._isAllStringValues = isAllStringValues
# Are we supporting keywords full text search?
self._keywordField = keywordField
# Call to fixup all operations after the fact.
self._postOpMapper = postOpMapper
# Call the original generation code.
detectComponent = super().generate(sigmaparser)
# We expect a string (yaml) as output, so if
# we get anything else we assume it's a core
# library value and just return it as-is.
if not isinstance( detectComponent, str):
return detectComponent
# It is redundant to deserialize it right after
# generating the yaml, but we try to use the parent
# official class code as much as possible for future
# compatibility.
detectComponent = yaml.safe_load(detectComponent)
# Check that we got a proper node and not just a string
# which we don't really know what to do with.
if not isinstance(detectComponent, dict):
raise NotImplementedError("Selection combination not supported.")
# Apply top level filter.
detectComponent.update(topFilter)
# Now prepare the Response component.
respondComponents = [{
"action": "report",
"name": ruleConfig["title"],
}]
# Add a lot of the metadata available to the report.
if ruleConfig.get("tags", None) is not None:
respondComponents[0].setdefault("metadata", {})["tags"] = ruleConfig["tags"]
if ruleConfig.get("description", None) is not None:
respondComponents[0].setdefault("metadata", {})["description"] = ruleConfig["description"]
if ruleConfig.get("references", None) is not None:
respondComponents[0].setdefault("metadata", {})["references"] = ruleConfig["references"]
if ruleConfig.get("level", None) is not None:
respondComponents[0].setdefault("metadata", {})["level"] = ruleConfig["level"]
if ruleConfig.get("author", None) is not None:
respondComponents[0].setdefault("metadata", {})["author"] = ruleConfig["author"]
if ruleConfig.get("falsepositives", None) is not None:
respondComponents[0].setdefault("metadata", {})["falsepositives"] = ruleConfig["falsepositives"]
# Assemble it all as a single, complete D&R rule.
return yaml.safe_dump({
"detect": detectComponent,
"respond": respondComponents,
}, default_flow_style = False)
def generateQuery(self, parsed):
# We override the generateQuery function because
# we generate proper JSON structures internally
# and only convert to string (yaml) once the
# whole thing is assembled.
result = self.generateNode(parsed.parsedSearch)
if self._preCondition is not None:
result = {
"op": "and",
"rules": [
self._preCondition,
result,
]
}
if self._postOpMapper is not None:
result = self._postOpMapper(result)
return yaml.safe_dump(result)
def generateANDNode(self, node):
generated = [ self.generateNode(val) for val in node ]
filtered = [ g for g in generated if g is not None ]
if not filtered:
return None
# Map any possible keywords.
filtered = self._mapKeywordVals(filtered)
if 1 == len(filtered):
if self._postOpMapper is not None:
filtered[0] = self._postOpMapper(filtered[0])
return filtered[0]
result = {
"op": "and",
"rules": filtered,
}
if self._postOpMapper is not None:
result = self._postOpMapper(result)
return result
def generateORNode(self, node):
generated = [self.generateNode(val) for val in node]
filtered = [g for g in generated if g is not None]
if not filtered:
return None
# Map any possible keywords.
filtered = self._mapKeywordVals(filtered)
if 1 == len(filtered):
if self._postOpMapper is not None:
filtered[0] = self._postOpMapper(filtered[0])
return filtered[0]
result = {
"op": "or",
"rules": filtered,
}
if self._postOpMapper is not None:
result = self._postOpMapper(result)
return result
def generateNOTNode(self, node):
generated = self.generateNode(node.item)
if generated is None:
return None
if not isinstance(generated, dict):
raise NotImplementedError("Not operator not available on non-dict nodes.")
generated["not"] = not generated.get("not", False)
return generated
def generateSubexpressionNode(self, node):
return self.generateNode(node.items)
def generateListNode(self, node):
return [self.generateNode(value) for value in node]
def generateMapItemNode(self, node):
fieldname, value = node
fieldNameAndValCallback = None
# The mapping can be a dictionary of mapping or a callable
# to get the correct value.
if callable(self._fieldMappingInEffect):
fieldname = self._fieldMappingInEffect(fieldname)
else:
try:
# The mapping can also be a callable that will
# return a mapped key AND value.
if callable(self._fieldMappingInEffect[fieldname]):
fieldNameAndValCallback = self._fieldMappingInEffect[fieldname]
else:
fieldname = self._fieldMappingInEffect[fieldname]
except KeyError:
raise NotImplementedError("Field name %s not supported by backend." % (fieldname,))
# If fieldname returned is None, it's a special case where we
# ignore the node.
if fieldname is None:
return None
if isinstance(value, (int, str)):
if fieldNameAndValCallback is not None:
fieldname, value = fieldNameAndValCallback(fieldname, value)
op, newVal = self._valuePatternToLcOp(value)
newOp = {
"op": op,
"path": fieldname,
"case sensitive": False,
}
if op == "matches":
newOp["re"] = newVal
else:
newOp["value"] = newVal
if self._postOpMapper is not None:
newOp = self._postOpMapper(newOp)
return newOp
elif isinstance(value, list):
subOps = []
for v in value:
if fieldNameAndValCallback is not None:
fieldname, v = fieldNameAndValCallback(fieldname, v)
op, newVal = self._valuePatternToLcOp(v)
newOp = {
"op": op,
"path": fieldname,
"case sensitive": False,
}
if op == "matches":
newOp["re"] = newVal
else:
newOp["value"] = newVal
if self._postOpMapper is not None:
newOp = self._postOpMapper(newOp)
subOps.append(newOp)
if 1 == len(subOps):
return subOps[0]
return {
"op": "or",
"rules": subOps
}
elif isinstance(value, SigmaTypeModifier):
if isinstance(value, SigmaRegularExpressionModifier):
if fieldNameAndValCallback is not None:
fieldname, value = fieldNameAndValCallback(fieldname, value)
result = {
"op": "matches",
"path": fieldname,
"re": re.compile(value),
}
if self._postOpMapper is not None:
result = self._postOpMapper(result)
return result
else:
raise TypeError("Backend does not support TypeModifier: %s" % (str(type(value))))
elif value is None:
if fieldNameAndValCallback is not None:
fieldname, value = fieldNameAndValCallback(fieldname, value)
result = {
"op": "exists",
"not": True,
"path": fieldname,
}
if self._postOpMapper is not None:
result = self._postOpMapper(result)
return result
else:
raise TypeError("Backend does not support map values of type " + str(type(value)))
def generateValueNode(self, node):
return node
def _valuePatternToLcOp(self, val):
# Here we convert the string values supported by Sigma that
# can include wildcards into either proper values (string or int)
# or into altered values to be functionally equivalent using
# a few different LC D&R rule operators.
# No point evaluating non-strings.
if not isinstance(val, str):
return ("is", str(val) if self._isAllStringValues else val)
# Is there any wildcard in this string? If not, we can short circuit.
if "*" not in val and "?" not in val:
return ("is", val)
# Now we do a small optimization for the shortcut operators
# available in LC. We try to see if the wildcards are around
# the main value, but NOT within. If that's the case we can
# use the "starts with", "ends with" or "contains" operators.
isStartsWithWildcard = False
isEndsWithWildcard = False
tmpVal = val
if tmpVal.startswith("*"):
isStartsWithWildcard = True
tmpVal = tmpVal[1:]
if tmpVal.endswith("*") and not (tmpVal.endswith("\\*") and not tmpVal.endswith("\\\\*")):
isEndsWithWildcard = True
if tmpVal.endswith("\\\\*"):
# An extra \ had to be there so it didn't escape the
# *, but since we plan on removing the *, we can also
# remove one \.
tmpVal = tmpVal[:-2]
else:
tmpVal = tmpVal[:-1]
# Check to see if there are any other wildcards. If there are
# we cannot use our shortcuts.
if "*" not in tmpVal and "?" not in tmpVal:
if isStartsWithWildcard and isEndsWithWildcard:
return ("contains", tmpVal)
if isStartsWithWildcard:
return ("ends with", tmpVal)
if isEndsWithWildcard:
return ("starts with", tmpVal)
# This is messy, but it is accurate in generating a RE based on
# the simplified wildcard system, while also supporting the
# escaping of those wildcards.
segments = []
tmpVal = val
while True:
nEscapes = 0
for i in range(len(tmpVal)):
# We keep a running count of backslash escape
# characters we see so that if we meet a wildcard
# we can tell whether the wildcard is escaped
# (with odd number of escapes) or if it's just a
# backslash literal before a wildcard (even number).
if "\\" == tmpVal[i]:
nEscapes += 1
continue
if "*" == tmpVal[i]:
if 0 == nEscapes:
segments.append(re.escape(tmpVal[:i]))
segments.append(".*")
elif nEscapes % 2 == 0:
segments.append(re.escape(tmpVal[:i - nEscapes]))
segments.append(tmpVal[i - nEscapes:i])
segments.append(".*")
else:
segments.append(re.escape(tmpVal[:i - nEscapes]))
segments.append(tmpVal[i - nEscapes:i + 1])
tmpVal = tmpVal[i + 1:]
break
if "?" == tmpVal[i]:
if 0 == nEscapes:
segments.append(re.escape(tmpVal[:i]))
segments.append(".")
elif nEscapes % 2 == 0:
segments.append(re.escape(tmpVal[:i - nEscapes]))
segments.append(tmpVal[i - nEscapes:i])
segments.append(".")
else:
segments.append(re.escape(tmpVal[:i - nEscapes]))
segments.append(tmpVal[i - nEscapes:i + 1])
tmpVal = tmpVal[i + 1:]
break
nEscapes = 0
else:
segments.append(re.escape(tmpVal))
break
val = ''.join(segments)
return ("matches", val)
def _mapKeywordVals(self, values):
# This function ensures that the values passed are proper D&R
# operations; plain strings indicate they were requested as
# keyword matches. We only support keyword matches when specified
# in the config, and we generally just map them to the most
# sensible common field in LC.
mapped = []
for val in values:
# Non-keywords are just passed through.
if not isinstance(val, str):
mapped.append(val)
continue
if self._keywordField is None:
raise NotImplementedError("Full-text keyboard searches not supported.")
# Bare strings here are indicative of "keywords", which are mostly
# full-text searches. We don't support those directly, but for
# some data sources we can alias them to an actual field.
op, newVal = self._valuePatternToLcOp(val)
newOp = {
"op": op,
"path": self._keywordField,
}
if op == "matches":
newOp["re"] = newVal
else:
newOp["value"] = newVal
mapped.append(newOp)
return mapped
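# For reference, a minimal sketch of the kind of D&R rule this backend
# emits (assuming a hypothetical process_creation rule titled
# "Example Rule" matching Image = "c:\\evil.exe"):
#   detect:
#     events: [NEW_PROCESS, EXISTING_PROCESS]
#     op: and
#     rules:
#       - op: is windows
#       - case sensitive: false
#         op: is
#         path: event/FILE_PATH
#         value: c:\evil.exe
#   respond:
#     - action: report
#       name: Example Rule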
+28
View File
@@ -15,9 +15,25 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
from functools import wraps
from .base import SingleTextQueryBackend
from .exceptions import NotSupportedError
def wrapper(method):
@wraps(method)
def _impl(self, method_args):
key, value, *_ = method_args
if '.keyword' in key:
key = key.split('.keyword')[0]
if key not in self.skip_fields:
method_output = method(self, method_args)
return method_output
else:
return
return _impl
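# Illustrative behaviour of the decorator above (a sketch, not from the
# original source): a node keyed "ProcessCommandLine.keyword" is looked
# up as "ProcessCommandLine", while a node whose key is listed in
# self.skip_fields (e.g. "Company") is dropped: the wrapped method is
# never called and None is returned for it.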
class WindowsDefenderATPBackend(SingleTextQueryBackend):
"""Converts Sigma rule into Microsoft Defender ATP Hunting Queries."""
identifier = "mdatp"
@@ -41,6 +57,16 @@ class WindowsDefenderATPBackend(SingleTextQueryBackend):
mapExpression = "%s == %s"
mapListsSpecialHandling = True
mapListValueExpression = "%s in %s"
skip_fields = {
"Description",
"_exists_",
"FileVersion",
"Product",
"Company",
"ParentProcessName",
"ParentCommandLine"
}
def __init__(self, *args, **kwargs):
"""Initialize field mappings"""
@@ -57,6 +83,7 @@ class WindowsDefenderATPBackend(SingleTextQueryBackend):
"DestinationIp" : ("RemoteIP", self.default_value_mapping),
"DestinationIsIpv6" : ("RemoteIP has \":\"", ),
"DestinationPort" : ("RemotePort", self.default_value_mapping),
"Protocol" : ("RemoteProtocol", self.default_value_mapping),
"Details" : ("RegistryValueData", self.default_value_mapping),
"EventType" : ("ActionType", self.default_value_mapping),
"Image" : ("FolderPath", self.default_value_mapping),
@@ -151,6 +178,7 @@ class WindowsDefenderATPBackend(SingleTextQueryBackend):
return "%s | where tostring(extractjson('$.Command', AdditionalFields)) in~ " % self.table
return "%s | where " % self.table
@wrapper
def generateMapItemNode(self, node):
"""
ATP queries refer to event tables instead of Windows logging event identifiers. This method catches conditions that refer to this field
+38
View File
@@ -72,6 +72,7 @@ class SplunkBackend(SingleTextQueryBackend):
def generate(self, sigmaparser):
"""Method is called for each sigma rule and receives the parsed rule (SigmaParser)"""
columns = list()
mapped = None
try:
for field in sigmaparser.parsedyaml["fields"]:
mapped = sigmaparser.config.get_fieldmapping(field).resolve_fieldname(field, sigmaparser)
@@ -170,3 +171,40 @@ class SplunkXMLBackend(SingleTextQueryBackend, MultiRuleOutputMixin):
def finalize(self):
self.queries += self.dash_suf
return self.queries
class CrowdStrikeBackend(SplunkBackend):
"""Converts Sigma rule into CrowdStrike Search Processing Language (SPL)."""
identifier = "crowdstrike"
def generate(self, sigmaparser):
lgs = sigmaparser.parsedyaml.get("logsource")
if lgs.get("product") == "windows" and (lgs.get("service") == "sysmon" or lgs.get("category") == "process_creation"):
fieldmappings = sigmaparser.config.fieldmappings
detections = sigmaparser.definitions
all_fields = dict()
for det in detections.values():
try:
for field, value in det.items():
if "|" in field:
field = field.split("|")[0]
if field in fieldmappings.keys():
if field == "EventID" and str(value) == str(1) and lgs.get("service") == "sysmon":
all_fields.update(det)
elif field != "EventID":
all_fields.update(det)
else:
raise NotImplementedError("Not supported fields!")
else:
raise NotImplementedError("Not supported fields!")
except AttributeError: # ignore if detection is not a dict
pass
table_fields = sigmaparser.parsedyaml.get("fields", [])
res_table_fields = []
for fl in table_fields:
if fl in fieldmappings.keys():
res_table_fields.append(fl)
sigmaparser.parsedyaml["fields"] = res_table_fields
return super().generate(sigmaparser)
else:
raise NotImplementedError("Not supported logsources!")