Added Humio, Crowdstrike, Corelight

This commit is contained in:
vh
2020-05-08 13:41:52 +03:00
parent 5dc30bd388
commit fb9c5841f4
29 changed files with 5649 additions and 753 deletions
+101
View File
@@ -0,0 +1,101 @@
title: Azure Sentinel
order: 20
backends:
- ala
- ala-rule
fieldmappings:
ComputerName: Computer
Event-ID: EventID
Event_ID: EventID
eventId: EventID
event_id: EventID
event-id: EventID
eventid: EventID
hashes: Hashes
file_hash: Hashes
url.query: URL
resource.URL: URL
src_ip: SourceIp
source.ip: SourceIp
FileName: TargetFilename
dst_ip: DestinationIP
destination.ip: DestinationIP
event_data.AccessMask: AccessMask
event_data.AllowedToDelegateTo: AllowedToDelegateTo
event_data.AttributeLDAPDisplayName: AttributeLDAPDisplayName
event_data.AuditPolicyChanges: AuditPolicyChanges
event_data.AuthenticationPackageName: AuthenticationPackageName
event_data.CallingProcessName: CallingProcessName
event_data.CallTrace: CallTrace
event_data.CommandLine: CommandLine
Commandline: CommandLine
cmd: CommandLine
event_data.ComputerName: ComputerName
event_data.CurrentDirectory: CurrentDirectory
event_data.Description: Description
event_data.DestinationHostname: DestinationHostname
event_data.DestinationIp: DestinationIp
event_data.DestinationPort: DestinationPort
event_data.Details: Details
event_data.EngineVersion: EngineVersion
event_data.EventType: EventType
event_data.FailureCode: FailureCode
event_data.FileName: FileName
event_data.GrantedAccess: GrantedAccess
event_data.GroupName: GroupName
event_data.GroupSid: GroupSid
event_data.Hashes: Hashes
event_data.HiveName: HiveName
event_data.HostVersion: HostVersion
Image:
service=security: Process
category=process_creation: NewProcessName
default: Image
event_data.Image:
service=security: Process
category=process_creation: NewProcessName
default: Image
event_data.ImageLoaded: ImageLoaded
event_data.ImagePath: ImagePath
event_data.Imphash: Imphash
event_data.IpAddress: IpAddress
event_data.KeyLength: KeyLength
event_data.LogonProcessName: LogonProcessName
event_data.LogonType: LogonType
event_data.NewProcessName: NewProcessName
event_data.ObjectClass: ObjectClass
event_data.ObjectName: ObjectName
event_data.ObjectType: ObjectType
event_data.ObjectValueName: ObjectValueName
event_data.ParentCommandLine: ParentCommandLine
event_data.ParentImage:
category=process_creation: ParentProcessName
default: ParentImage
ParentImage:
category=process_creation: ParentProcessName
default: ParentImage
event_data.ParentProcessName: ParentProcessName
event_data.Path: Path
event_data.PipeName: PipeName
event_data.ProcessCommandLine: CommandLine
event_data.ProcessName: ProcessName
event_data.Properties: Properties
event_data.SecurityID: SecurityID
event_data.ServiceFileName: ServiceFileName
event_data.ServiceName: ServiceName
event_data.ShareName: ShareName
event_data.Signature: Signature
event_data.Source: Source
event_data.SourceImage: SourceImage
event_data.StartModule: StartModule
event_data.Status: Status
event_data.SubjectUserName: SubjectUserName
event_data.SubjectUserSid: SubjectUserSid
event_data.TargetFilename: TargetFilename
event_data.TargetImage: TargetImage
event_data.TargetObject: TargetObject
event_data.TicketEncryptionType: TicketEncryptionType
event_data.TicketOptions: TicketOptions
event_data.User: User
event_data.WorkstationName: WorkstationName
+1053
View File
@@ -0,0 +1,1053 @@
title: ArcSight Corelight Zeek and Corelight Opensource Zeek Configuration
order: 20
backends:
- arcsight
- arcsight-esm
logsources:
zeek:
product: zeek
conditions:
deviceVendor: Bro
zeek-category-accounting:
category: accounting
rewrite:
product: zeek
service: syslog
zeek-category-firewall:
category: firewall
conditions:
deviceEventCategory: conn
zeek-category-dns:
category: dns
conditions:
deviceEventCategory: dns
zeek-category-proxy:
category: proxy
rewrite:
product: zeek
service: http
zeek-category-webserver:
category: webserver
conditions:
deviceEventCategory: http
rewrite:
product: zeek
service: http
zeek-conn:
product: zeek
service: conn
conditions:
deviceEventCategory: conn
zeek-conn_long:
product: zeek
service: conn_long
conditions:
deviceEventCategory: conn_long
zeek-dce_rpc:
product: zeek
service: dce_rpc
conditions:
deviceEventCategory: dce_rpc
zeek-dns:
product: zeek
service: dns
conditions:
deviceEventCategory: dns
zeek-dnp3:
product: zeek
service: dnp3
conditions:
deviceEventCategory: dnp3
zeek-dpd:
product: zeek
service: dpd
conditions:
deviceEventCategory: dpd
zeek-files:
product: zeek
service: files
conditions:
deviceEventCategory: files
zeek-ftp:
product: zeek
service: ftp
conditions:
deviceEventCategory: ftp
zeek-gquic:
product: zeek
service: gquic
conditions:
deviceEventCategory: gquic
zeek-http:
product: zeek
service: http
conditions:
deviceEventCategory: http
zeek-http2:
product: zeek
service: http2
conditions:
deviceEventCategory: http2
zeek-intel:
product: zeek
service: intel
conditions:
deviceEventCategory: intel
zeek-irc:
product: zeek
service: irc
conditions:
deviceEventCategory: irc
zeek-kerberos:
product: zeek
service: kerberos
conditions:
deviceEventCategory: kerberos
zeek-known_certs:
product: zeek
service: known_certs
conditions:
deviceEventCategory: known_certs
zeek-known_hosts:
product: zeek
service: known_hosts
conditions:
deviceEventCategory: known_hosts
zeek-known_modbus:
product: zeek
service: known_modbus
conditions:
deviceEventCategory: known_modbus
zeek-known_services:
product: zeek
service: known_services
conditions:
deviceEventCategory: known_services
zeek-modbus:
product: zeek
service: modbus
conditions:
deviceEventCategory: modbus
zeek-modbus_register_change:
product: zeek
service: modbus_register_change
conditions:
deviceEventCategory: modbus_register_change
zeek-mqtt_connect:
product: zeek
service: mqtt_connect
conditions:
deviceEventCategory: mqtt_connect
zeek-mqtt_publish:
product: zeek
service: mqtt_publish
conditions:
deviceEventCategory: mqtt_publish
zeek-mqtt_subscribe:
product: zeek
service: mqtt_subscribe
conditions:
deviceEventCategory: mqtt_subscribe
zeek-mysql:
product: zeek
service: mysql
conditions:
deviceEventCategory: mysql
zeek-notice:
product: zeek
service: notice
conditions:
deviceEventCategory: notice
zeek-ntlm:
product: zeek
service: ntlm
conditions:
deviceEventCategory: ntlm
zeek-ntp:
product: zeek
service: ntp
conditions:
deviceEventCategory: ntp
zeek-ocsp:
product: zeek
service: ocsp
conditions:
deviceEventCategory: ocsp
zeek-pe:
product: zeek
service: pe
conditions:
deviceEventCategory: pe
zeek-pop3:
product: zeek
service: pop3
conditions:
deviceEventCategory: pop3
zeek-radius:
product: zeek
service: radius
conditions:
deviceEventCategory: radius
zeek-rdp:
product: zeek
service: rdp
conditions:
deviceEventCategory: rdp
zeek-rfb:
product: zeek
service: rfb
conditions:
deviceEventCategory: rfb
zeek-sip:
product: zeek
service: sip
conditions:
deviceEventCategory: sip
zeek-smb_files:
product: zeek
service: smb_files
conditions:
deviceEventCategory: smb_files
zeek-smb_mapping:
product: zeek
service: smb_mapping
conditions:
deviceEventCategory: smb_mapping
zeek-smtp:
product: zeek
service: smtp
conditions:
deviceEventCategory: smtp
zeek-smtp_links:
product: zeek
service: smtp_links
conditions:
deviceEventCategory: smtp_links
zeek-snmp:
product: zeek
service: snmp
conditions:
deviceEventCategory: snmp
zeek-socks:
product: zeek
service: socks
conditions:
deviceEventCategory: socks
zeek-software:
product: zeek
service: software
conditions:
deviceEventCategory: software
zeek-ssh:
product: zeek
service: ssh
conditions:
deviceEventCategory: ssh
zeek-ssl:
product: zeek
service: ssl
conditions:
deviceEventCategory: tls
zeek-tls: # In case people call it TLS even though orig log is called ssl, but dataset is tls so may cause confusion so cover that
product: zeek
service: tls
conditions:
deviceEventCategory: tls
zeek-syslog:
product: zeek
service: syslog
conditions:
deviceEventCategory: syslog
zeek-tunnel:
product: zeek
service: tunnel
conditions:
deviceEventCategory: tunnel
zeek-traceroute:
product: zeek
service: traceroute
conditions:
deviceEventCategory: traceroute
zeek-weird:
product: zeek
service: weird
conditions:
deviceEventCategory: weird
zeek-x509:
product: zeek
service: x509
conditions:
deviceEventCategory: x509
zeek-ip_search:
product: zeek
service: network
conditions:
deviceEventCategory:
- conn
- conn_long
- dce_rpc
- dhcp
- dnp3
- dns
- ftp
- gquic
- http
- irc
- kerberos
- modbus
- mqtt_connect
- mqtt_publish
- mqtt_subscribe
- mysql
- ntlm
- ntp
- radius
- rfb
- sip
- smb_files
- smb_mapping
- smtp
- smtp_links
- snmp
- socks
- ssh
- tls #SSL
- tunnel
- weird
fieldmappings:
cs-uri-extension: fileType
cs-uri-path: filePath
s-dns:
- destinationDnsDomain
- destinationHost
# All Logs Applied Mapping & Taxonomy
clientip: sourceAddress
dst: destinationAddress
dst_ip: destinationAddress
dst_port: destinationPort
host: requestHost
#inner_vlan:
mac: sourceMacAddress
mime_type: fileType
network_application: applicationProtocol
#network_community_id:
network_protocol: transportProtocol
password: message
port_num: sourcePort
proto: transportProtocol
#result:
#rtt:
server_name: destinationHostName
src: sourceAddress
src_ip: sourceAddress
src_port: sourcePort
#success:
uri:
- requestUrl
- requestUrlQuery
user: sourceUserName
username: sourceUserName
user_agent:
- deviceCustomString5
- requestClientApplication
#vlan:
# DNS matching Taxonomy & DNS Category
answer: message
#question_length:
record_type: deviceCustomString1
#parent_domain:
# HTTP matching Taxonomy & Web/Proxy Category
cs-bytes: bytesOut
cs-cookie: message
r-dns:
- destinationDnsDomain
- destinationHost
sc-bytes: bytesIn
sc-status: message
c-uri:
- requestUrl
- requestUrlQuery
c-uri-extension: fileType
c-uri-query:
- requestUrl
- requestUrlQuery
c-uri-stem:
- requestUrl
- requestUrlQuery
c-useragent:
- deviceCustomString5
- requestClientApplication
cs-host:
- destinationDnsDomain
- destinationHost
cs-method: requestMethod
cs-referrer:
- deviceCustomString4
- requestContext
cs-version: message
# All log UIDs
#cert_chain_fuids:
#client_cert_chain_fuids:
#client_cert_fuid:
#conn_uids:
#fid:
#fuid:
#fuids:
#id:
#orig_fuids:
#parent_fuid:
#related_fuids:
#resp_fuids:
#server_cert_fuid:
#tunnel_parents:
#uid:
#uids:
#uuid:
# Overlapping fields/mappings (aka: shared fields)
action:
- 'deviceAction'
#service=smb_files:
#service=mqtt:
#service=tunnel:
addl:
- 'message'
#service=dns:
#service=weird:
analyzer:
- 'applicationProtocol'
- 'name'
#service=dpd:
#service=files:
arg:
- 'message'
#auth:
#service=rfb: #RFB does not exist in newer logs, so skipping to cover dns.auth
cipher:
- 'deviceCustomString4'
- 'message'
#service=kerberos:
#service=ssl:
client:
- 'deviceCustomString5'
#service=kerberos:
#service=ssh:
command:
- 'message'
#service=pop3:
#service=ftp:
#service=irc:
date:
#service=sip:
#service=smtp:
duration:
- 'deviceCustomString4'
#service=conn:
#service=files:
#service=snmp:
from:
- 'message'
#service=kerberos:
#service=smtp:
#is_orig:
#service=file:
#service=pop3:
#local_orig:
#service=conn
#service=files
method:
- 'requestMethod'
#service=http:
#service=sip:
msg:
- 'message'
#service=notice:
#service=pop3:
name:
- 'name'
#service=smb_files:
#service=software:
#service=weird:
path:
- 'filePath'
#service=smb_files:
#service=smb_mapping:
#service=smtp:
reply_msg:
- 'message'
#service=ftp:
#service=radius:
reply_to:
- 'message'
#service=sip:
#service=smtp:
response_body_len:
- 'bytesOut'
#service=http:
#service=sip:
request_body_len:
- 'bytesIn'
#service=http:
#service=sip:
service:
- 'applicationProtocol'
#service=kerberos:
#service=smb_mapping:
status:
- 'message'
#service=pop3:
#service=mqtt:
#service=socks:
status_msg:
- 'message'
subject:
- 'message'
#service=known_certs:
#service=sip:
#service=smtp:
#service=ssl:
trans_depth:
- 'deviceCustomNumber1'
#service=http:
#service=sip:
#service=smtp:
version:
- 'message'
- 'deviceCustomString2'
#service=gquic:
#service=ntp:
#service=socks:
#service=snmp:
#service=ssh:
#service=tls:
# Conn and Conn Long
#cache_add_rx_ev:
#cache_add_rx_mpg:
#cache_add_rx_new:
#cache_add_tx_ev:
#cache_add_tx_mpg:
#cache_del_mpg:
#cache_entries:
conn_state: deviceSeverity
#corelight_shunted:
#duration: deviceCustomString4
#history:
#id.orig_h.name_src:
#id.orig_h.names_vals:
#id.resp_h.name_src:
#id.resp_h.name_vals:
#local_orig:
#local_resp:
missed_bytes: deviceCustomNumber1
orig_bytes: bytesOut
#orig_cc:
orig_ip_bytes: deviceCustomNumber2
orig_l2_addr: sourceMacAddress
#orig_pkts:
resp_bytes: bytesIn
#resp_cc:
resp_ip_bytes: deviceCustomNumber3
resp_l2_addr: destinationMacAddress
#resp_pkts:
# DCE-RPC Specific
endpoint: message
named_pipe: message
operation: message
#rtt:
# DHCP
domain: message
host_name: message
lease_time: deviceCustomString4
agent_remote_id: message
assigned_addr: message
circuit_id: message
client_message: message
client_software: message
client_fqdn: message
#mac:sourceMacAddress
msg_orig: message
msg_types: message
requested_addr: message
server_addr: message
server_message: message
server_software: message
subscriber_id: message
# DNS
AA: message
#addl: message
auth: message
answers: message
TTLs: message
RA: message
RD: message
rejected: eventOutcome
TC: message
Z: message
qclass: message
qclass_name: deviceCustomString4
qtype: deviceEventClassId
qtype_name:
- deviceCustomString1
- name
query: destinationDnsDomain
rcode_name: message
rcode: message
rtt: message
trans_id: deviceCustomNumber1
# DNP3
fc_reply: message
fc_request: message
iin: message
# DPD
#analyzer:
failure_reason: message
packet_segment: message
# Files
rx_hosts: destinationHostName
tx_hosts: sourceHostName
#analyzer:
#depth:
#duration:
#extracted:
#extracted_cutoff:
#extracted_size:
#entropy:
md5: fileHash
sha1: fileHash
sha256: fileHash
#is_orig:
#local_orig:
#missing_bytes:
filename: fileName
overflow_bytes: bytesOut
#seen_bytes:
source: filePath
total_bytes: bytesIn
#timedout:
# GQUIC/QUIC
cyu: message
cyutags: message
#server_name: message
tag_count: message
#user_agent: deviceCustomString5
#version:
# FTP
#arg: message
#command: message
cwd: message
data_channel.orig_h: message
data_channel.passive: eventOutcome
data_channel.resp_h: message
data_channel.resp_p: deviceCustomNumber1
passive: message
file_size: fileSize
#mime_type: fileType
#password: message
reply_code: deviceEventClassId
#reply_msg: message
#user: sourceUserName
# HTTP
client_header_names: message
cookie_vars: message
flash_version: message
info_code: message
info_msg: message
omniture: message
orig_filenames: fileName
orig_mime_types: fileType
origin: message
#password: message
post_body: message
proxied: message
referrer:
- deviceCustomString4
- requestContext
resp_filenames: fileName
resp_mime_types: fileType
server_header_names: message
status_code: deviceSeverity
#status_msg: message
#trans_depth:
uri_vars: message
#user_agent: deviceCustomString5
#username: sourceUserName
# Intel
file_mime_type: message
file_desc: message
#host:
matched: message
indicator: message
indicator_type: message
node: message
where: message
sources: message
# IRC
dcc_file_name: fileName
dcc_file_size: fileSize
dcc_mime_type: fileType
#command:
nick: message
#user:
value: message
# Kerberos
auth_ticket: message
#cipher: message
#client: message
client_cert_subject: message
error_code: message
error_msg: message
#from: message
forwardable: message
new_ticket: message
renewable: message
request_type: message
server_cert_subject: message
#service: applicationProtocol
#success:
till: message
# Known_Certs
#host: sourceAddress
issuer_subject: deviceCustomString3
#port_num: sourcePort
serial: deviceCustomString4
#subject: message
# Known_Modbus
#host:
device_type: message
# Known_Services
port_proto: transportProtocol
#port_num: sourcePort
# Modbus All
delta: message
new_val: message
old_val: message
register: message
# Modbus
func: message
exception: message
track_address: message
# ModBus_Register_Change
#delta: message
#new_val: message
#old_val: message
#register: message
# MQTT_Connect , MQTT_Publish, MQTT_Subscribe
ack: message
#action: message
client_id: message
connect_status: message
from_client: message
granted_qos_level: message
payload: message
payload_len: message
proto_name: message
proto_version: message
qos: message
qos_levels: message
retain: message
#status: message
topic: message
topics: message
will_payload: message
will_topic: message
# MYSQL
#arg: message
cmd: message
response: message
rows: message
#success:
# Notice
actions: deviceEventClassId
#dropped:
#dst: destinationAddress
email_body_sections: message
email_delay_tokens: message
identifier: message
#msg:
n: message
note: message
p: destinationPort
peer_descr: deviceCustomString5
peer_name: deviceCustomString4
#proto: transport
#src: sourceAddress
sub: message
suppress_for: deviceCustomFloatingPoint1
# NTLM
domainname: message
hostname: message
#username: sourceUserName
server_nb_computer_name: message
server_tree_name: message
#success:
server_dns_computer_name: message
# NTP
mode: message
num_exts: message
org_time: message
poll: message
precision: message
rec_time: message
ref_id: message
ref_time: message
root_delay: message
root_disp: message
stratum: message
#version:
xmt_time: message
# OCSP
certStatus: message
hashAlgorithm: message
issuerKeyHash: message
issuerNameHash: message
nextUpdate: message
revokereason: message
revoketime: message
serialNumber: message
thisUpdate: message
# PE
compile_ts: message
has_cert_table: message
has_debug_data: message
has_import_table: message
has_export_table: message
is_64bit: message
is_exe: message
machine: message
os: message
section_names: message
subsystem: message
uses_aslr: message
uses_code_integrity: message
uses_dep: message
uses_seh: message
# POP3
#arg: message
#command: message
current_request: message
current_response: message
data: message
failed_commands: message
has_client_activity: message
#is_orig: message
#msg: message
#password:
pending: message
#status: message
successful_commands: message
#username: sourceUserName
# Radius
connect_info: message
framed_addr: message
#mac:sourceMacAddress
#reply_msg: message
#result:
ttl: message
tunnel_client: message
#username: sourceUserName
# RDP
cert_count: message
cert_permanent: message
cert_type: message
client_build: message
client_dig_product_id: message
client_name: message
cookie: message
desktop_height: message
desktop_width: message
encryption_level: message
encryption_method: message
keyboard_layout: message
requested_color_depth: message
#result:
security_protocol: message
ssl: message
# RFB
#auth:
authentication_method: message
client_major_version: message
client_minor_version: message
desktop_name: message
height: message
server_major_version: message
server_minor_version: message
share_flag: message
width: message
# SIP
call_id: message
content_type: message
#date: message
#method: requestMethod
#reply_to: message
#request_body_len: message
request_from: message
request_path: message
request_to: message
#response_body_len: message
response_from: message
response_path: message
response_to: message
seq: message
#status_code:
#status_msg: message
#subject: message
#trans_depth: deviceCustomNumber1
#uri:
warning: message
#user_agent: deviceCustomString5
# SMB_Files
#action:
#name: fileName
#path: filePath
prev_name: message
size: fileSize
times_accessed: message
times_changed: message
times_created: message
times_modified: message
# SMB_Mapping
native_file_system: message
#path: filePath
share_type: message
#service:
# SMTP
cc: message
#date: message
first_received: message
#from:
helo: message
in_reply_to: message
is_webmail: message
last_reply: message
mailfrom: sourceUserName
#msg_id: message
#path: message
rcptto: message
#reply_to: message
second_received: message
#subject: message
tls: message
to: message
#trans_depth: deviceCustomNumber1
x_originating_ip: message
#user_agent: deviceCustomString5
# SMTP_Links
#host:
#uri:
# SNMP
#duration:
community: message
display_string: message
get_bulk_requests: message
get_requests: message
set_requests: message
up_since: message
#version:
# Socks
#password: message
bound_host: message
bound_name: message
bound_p: message
request_host: message
request_name: message
request_p: message
#status: message
#version: message
# Software
#host:
host_p: sourcePort
version.major: deviceCustomString3
version.minor: deviceCustomString4
version.minor2: message
version.minor3: message
#name:
unparsed_version: message
software_type: deviceEventClassId
#url:
# SSH
#auth_attempts:
auth_success: name
cipher_alg: message
#client: deviceCustomString5
compression_alg: message
cshka: message
direction: deviceDirection
hassh: message
hasshAlgorithms: message
hasshServer: message
hasshServerAlgorithms: message
hasshVersion: message
host_key: message
host_key_alg: message
kex_alg: message
mac_alg: message
server: deviceCustomString4
#version:
# SSL / TLS
#cipher: deviceCustomString4
client_issuer: deviceCustomString1
client_subject: sourceUserName
curve: message
established: eventOutcome
issuer: deviceCustomString1
ja3: message
ja3s: message
last_alert: message
next_protocol: message
notary: message
ocsp_status: message
orig_certificate_sha1: message
resp_certificate_sha1: message
resumed: message
#server_name: destinationHostName
#subject: message
valid_ct_logs: message
valid_ct_operators: message
valid_ct_operators_list: message
validation_status: message
#version: deviceCustomString2
version_num: message
# Syslog
facility: message
severity: message
message: message
# Traceroute
#proto: transport
#dst: destinationAddress
#src: sourceAddress
# Tunnel
#action: deviceAction
tunnel_type: name
# Weird
#addl: message
#name: name
notice: message
peer: deviceCustomString4
# X509
basic_constraints.ca: message
basic_constraints.path_len: message
certificate.cn: message
certificate.curve: message
certificate.exponent: message
certificate.issuer: deviceCustomString3
certificate.key_alg: message
certificate.key_length: message
certificate.key_type: message
certificate.not_valid_after: deviceCustomDate2
certificate.not_valid_before: deviceCustomDate1
certificate.serial: message
certificate.sig_alg: message
certificate.subject: message
certificate.version: message
logcert: message
san.dns:
- message
- destinationDnsDomain
- destinationHost
san.email:
- message
- sourceUserName
san.ip:
- message
- sourceAddress
san.uri:
- requestUrl
- requestUrlQuery
+19
View File
@@ -0,0 +1,19 @@
title: CrowdStrike Windows log source conditions
order: 20
backends:
- crowdstrike
logsources:
windows-sysmon:
product: windows
service: sysmon
conditions:
EventID: 1
process_creation_1:
category: process_creation
product: windows
fieldmappings:
EventID: EventID
CommandLine: Commandline
Command_Line: Commandline
Image: ImageFileName
+69
View File
@@ -0,0 +1,69 @@
title: Elastic Common Schema mapping for proxy and webserver logs including NSM DNS logs (zeek/suricata)
order: 20
backends:
- es-qs
- es-dsl
- elasticsearch-rule
- kibana
- xpack-watcher
- elastalert
- elastalert-dsl
# zeek-category-dns:
# category: dns
# conditions:
# event.dataset: dns
# zeek-dns:
# product: zeek
# service: dns
# conditions:
# event.dataset: dns
defaultindex:
- filebeat-*
# logsourcemerging: or
fieldmappings:
# All Logs Applied Mapping & Taxonomy
dst:
- destination.address
- destination.ip
dst_ip:
- destination.address
- destination.ip
dst_port: destination.port
src:
- source.address
- source.ip
src_ip:
- source.address
- source.ip
src_port: source.port
# DNS Taxonomy
answer: dns.answers.name
c-dns: dns.question.name
parent_domain: dns.question.registered_domain
query: dns.question.name
QueryName: dns.question.name
r-dns: dns.question.name
record_type: dns.answers.type
response: dns.answers
#question_length:
# Zeek DNS specific
AA: dns.AA
addl: dns.addl
answers: dns.answers.name
auth: dns.auth
qclass_name: dns.question.class
qclass: dns.qclass
qtype_name: dns.question.type
qtype: dns.qtype
query: dns.question.name
#question_length: labels.dns.query_length
RA: dns.RA
rcode_name: dns.response_code
rcode: dns.rcode
RD: dns.RD
rejected: dns.rejected
rtt: dns.rtt
TC: dns.TC
trans_id: dns.id
TTLs: dns.answers.ttl
Z: dns.Z
+188 -3
View File
@@ -12,13 +12,198 @@ logsources:
category: proxy
index: filebeat-*
fieldmappings:
c-uri: url.original
# All Logs Applied Mapping & Taxonomy
dst:
- destination.address
- destination.ip
dst_ip:
- destination.address
- destination.ip
dst_port: destination.port
src:
- source.address
- source.ip
src_ip:
- source.address
- source.ip
src_port: source.port
# Web/Proxy Taxonomy
cs-bytes: http.request.body.bytes
cs-cookie-vars: http.cookie_vars
c-uri-extension: url.extension
c-uri-query: url.query
c-uri-stem: url.original
c-uri: url.original
c-useragent: user_agent.original
cs-cookie: http.cookie
cs-host: url.domain
cs-host:
- url.domain
- destination.domain
cs-method: http.request.method
r-dns: url.domain
cs-referrer: http.request.referrer
cs-version: http.version
r-dns:
- destination.domain
- url.domain
sc-bytes: http.response.body.bytes
sc-status: http.response.status_code
# Temporary one off rule name fields
agent.version: http.version
c-ip:
- source.address
- source.ip
clientip:
- source.address
- source.ip
clientIP:
- source.address
- source.ip
dest_domain:
- destination.domain
- url.domain
dest_ip:
- destination.address
- destination.ip
dest_port: destination.port
destination.hostname:
- destination.domain
- url.domain
DestinationAddress:
DestinationHostname:
- destination.domain
- url.domain
DestinationIp:
- destination.address
- destination.ip
DestinationIP:
- destination.address
- destination.ip
DestinationPort: destination.port
dst-ip:
- destination.address
- destination.ip
dstip:
- destination.address
- destination.ip
dstport: destination.port
Host:
- destination.domain
- url.domain
host:
- destination.domain
- url.domain
HostVersion: http.version
http_host:
- destination.domain
- url.domain
http_uri: url.original
http_url: url.original
http_user_agent: user_agent.original
http.request.url-query-params: url.original
HttpMethod: http.request.method
in_url: url.original
parent_domain:
- url.registered_domain
- destination.registered_domain
post_url_parameter: url.original
Request Url: url.original
request_url: url.original
request_URL: url.original
RequestUrl: url.original
response: http.response.status_code
resource.url: url.original
resource.URL: url.original
sc_status: http.response.status_code
sender_domain:
- destination.domain
- url.domain
service.response_code: http.response.status_code
source:
- source.address
- source.ip
SourceAddr:
- source.address
- source.ip
SourceAddress:
- source.address
- source.ip
SourceIP:
- source.address
- source.ip
SourceIp:
- source.address
- source.ip
SourceNetworkAddress:
- source.address
- source.ip
SourcePort: source.port
srcip:
- source.address
- source.ip
Status: http.response.status_code
status: http.response.status_code
url: url.original
URL: url.original
url_query: url.original
url.query: url.original
uri_path: url.original
user_agent: user_agent.original
user_agent.name: user_agent.original
user-agent: user_agent.original
User-Agent: user_agent.original
useragent: user_agent.original
UserAgent: user_agent.original
web_dest:
- url.domain
- destination.domain
web.dest:
- url.domain
- destination.domain
Web.dest:
- url.domain
- destination.domain
web.host:
- url.domain
- destination.domain
Web.host:
- url.domain
- destination.domain
web_method: http.request.method
Web_method: http.request.method
web.method: http.request.method
Web.method: http.request.method
web_src:
- source.address
- source.ip
web_status: http.response.status_code
Web_status: http.response.status_code
web.status: http.response.status_code
Web.status: http.response.status_code
web_uri: url.original
web_url: url.original
# Zeek HTTP as Proxy/Web
client_header_names: http.client_header_names
cookie_vars: http.cookie_vars
flash_version: http.flash_version
info_code: http.info_code
info_msg: http.info_msg
method: http.request.method
omniture: http.omniture
orig_filenames: http.orig_filenames
orig_mime_types: http.orig_mime_types
origin: http.origin
#password: source.user.password
post_body: http.post_body
proxied: http.proxied
referrer: http.request.referrer
request_body_len: http.request.body.bytes
resp_filenames: http.resp_filenames
resp_mime_types: http.resp_mime_types
response_body_len: http.response.body.bytes
server_header_names: http.server_header_names
status_code: http.response.status_code
status_msg: http.status_msg
trans_depth: http.trans_depth
uri_vars: http.uri_vars
username: source.user.name
version: http.version
+1182
View File
@@ -0,0 +1,1182 @@
title: Corelight Zeek and Corelight Opensource Zeek Elastic Common Schema (ECS) implementation
description: Uses the mappings as created by Corelight here https://github.com/corelight/ecs-mapping
order: 20
backends:
- es-qs
- corelight_es-qs
- es-dsl
- elasticsearch-rule
- corelight_elasticsearch-rule
- kibana
- corelight_kibana
- xpack-watcher
- corelight_xpack-watcher
- elastalert
- elastalert-dsl
logsources:
zeek:
product: zeek
index: '*ecs-*'
#'*ecs-corelight*'
#'*ecs-zeek-*
zeek-category-accounting:
category: accounting
rewrite:
product: zeek
service: syslog
zeek-category-firewall:
category: firewall
conditions:
event.dataset: conn
zeek-category-dns:
category: dns
conditions:
event.dataset: dns
zeek-category-proxy:
category: proxy
rewrite:
product: zeek
service: http
zeek-category-webserver:
category: webserver
conditions:
event.dataset: http
rewrite:
product: zeek
service: http
zeek-conn:
product: zeek
service: conn
conditions:
event.dataset: conn
zeek-conn_long:
product: zeek
service: conn_long
conditions:
event.dataset: conn_long
zeek-dce_rpc:
product: zeek
service: dce_rpc
conditions:
event.dataset: dce_rpc
zeek-dns:
product: zeek
service: dns
conditions:
event.dataset: dns
zeek-dnp3:
product: zeek
service: dnp3
conditions:
event.dataset: dnp3
zeek-dpd:
product: zeek
service: dpd
conditions:
event.dataset: dpd
zeek-files:
product: zeek
service: files
conditions:
event.dataset: files
zeek-ftp:
product: zeek
service: ftp
conditions:
event.dataset: ftp
zeek-gquic:
product: zeek
service: gquic
conditions:
event.dataset: gquic
zeek-http:
product: zeek
service: http
conditions:
event.dataset: http
zeek-http2:
product: zeek
service: http2
conditions:
event.dataset: http2
zeek-intel:
product: zeek
service: intel
conditions:
event.dataset: intel
zeek-irc:
product: zeek
service: irc
conditions:
event.dataset: irc
zeek-kerberos:
product: zeek
service: kerberos
conditions:
event.dataset: kerberos
zeek-known_certs:
product: zeek
service: known_certs
conditions:
event.dataset: known_certs
zeek-known_hosts:
product: zeek
service: known_hosts
conditions:
event.dataset: known_hosts
zeek-known_modbus:
product: zeek
service: known_modbus
conditions:
event.dataset: known_modbus
zeek-known_services:
product: zeek
service: known_services
conditions:
event.dataset: known_services
zeek-modbus:
product: zeek
service: modbus
conditions:
event.dataset: modbus
zeek-modbus_register_change:
product: zeek
service: modbus_register_change
conditions:
event.dataset: modbus_register_change
zeek-mqtt_connect:
product: zeek
service: mqtt_connect
conditions:
event.dataset: mqtt_connect
zeek-mqtt_publish:
product: zeek
service: mqtt_publish
conditions:
event.dataset: mqtt_publish
zeek-mqtt_subscribe:
product: zeek
service: mqtt_subscribe
conditions:
event.dataset: mqtt_subscribe
zeek-mysql:
product: zeek
service: mysql
conditions:
event.dataset: mysql
zeek-notice:
product: zeek
service: notice
conditions:
event.dataset: notice
zeek-ntlm:
product: zeek
service: ntlm
conditions:
event.dataset: ntlm
zeek-ntp:
product: zeek
service: ntp
conditions:
event.dataset: ntp
zeek-ocsp:
product: zeek
service: ocsp
conditions:
event.dataset: ocsp
zeek-pe:
product: zeek
service: pe
conditions:
event.dataset: pe
zeek-pop3:
product: zeek
service: pop3
conditions:
event.dataset: pop3
zeek-radius:
product: zeek
service: radius
conditions:
event.dataset: radius
zeek-rdp:
product: zeek
service: rdp
conditions:
event.dataset: rdp
zeek-rfb:
product: zeek
service: rfb
conditions:
event.dataset: rfb
zeek-sip:
product: zeek
service: sip
conditions:
event.dataset: sip
zeek-smb_files:
product: zeek
service: smb_files
conditions:
event.dataset: smb_files
zeek-smb_mapping:
product: zeek
service: smb_mapping
conditions:
event.dataset: smb_mapping
zeek-smtp:
product: zeek
service: smtp
conditions:
event.dataset: smtp
zeek-smtp_links:
product: zeek
service: smtp_links
conditions:
event.dataset: smtp_links
zeek-snmp:
product: zeek
service: snmp
conditions:
event.dataset: snmp
zeek-socks:
product: zeek
service: socks
conditions:
event.dataset: socks
zeek-software:
product: zeek
service: software
conditions:
event.dataset: software
zeek-ssh:
product: zeek
service: ssh
conditions:
event.dataset: ssh
zeek-ssl:
product: zeek
service: ssl
conditions:
event.dataset: tls
zeek-tls: # In case people call it TLS even though orig log is called ssl, but dataset is tls so may cause confusion so cover that
product: zeek
service: tls
conditions:
event.dataset: tls
zeek-syslog:
product: zeek
service: syslog
conditions:
event.dataset: syslog
zeek-tunnel:
product: zeek
service: tunnel
conditions:
event.dataset: tunnel
zeek-traceroute:
product: zeek
service: traceroute
conditions:
event.dataset: traceroute
zeek-weird:
product: zeek
service: weird
conditions:
event.dataset: weird
zeek-x509:
product: zeek
service: x509
conditions:
event.dataset: x509
zeek-ip_search:
product: zeek
service: network
conditions:
event.dataset:
- conn
- conn_long
- dce_rpc
- dhcp
- dnp3
- dns
- ftp
- gquic
- http
- irc
- kerberos
- modbus
- mqtt_connect
- mqtt_publish
- mqtt_subscribe
- mysql
- ntlm
- ntp
- radius
- rfb
- sip
- smb_files
- smb_mapping
- smtp
- smtp_links
- snmp
- socks
- ssh
- tls #SSL
- tunnel
- weird
defaultindex: '*ecs-*'
fieldmappings:
# All Logs Applied Mapping & Taxonomy
dst: destination.ip
dst_ip: destination.ip
dst_port: destination.port
host: host.ip
inner_vlan: network.vlan.inner.id
mac: source.mac
mime_type: file.mime_type
network_application: network.protocol
network_community_id: network.community_id
network_protocol: network.transport
password: source.user.password
port_num: labels.known.port
proto: network.transport
result: event.outcome
rtt: event.duration
server_name: destination.domain
src: source.ip
src_ip: source.ip
src_port: source.port
success: event.outcome
uri: url.original
user: source.user.name
username: source.user.name
user_agent: user_agent.original
vlan: network.vlan.id
# DNS matching Taxonomy & DNS Category
answer: dns.answers.name
question_length: labels.dns.query_length
record_type: dns.question.type
parent_domain: dns.question.registered_domain
# HTTP matching Taxonomy & Web/Proxy Category
cs-bytes: http.request.body.bytes
cs-cookie: http.cookie_vars
r-dns:
- url.domain
- destination.domain
sc-bytes: http.response.body.bytes
sc-status: http.response.status_code
c-uri: url.original
c-uri-extension: url.extension
c-uri-query: url.query
c-uri-stem: url.original
c-useragent: user_agent.original
cs-host:
- url.domain
- destination.domain
cs-method: http.request.method
cs-referrer: http.request.referrer
cs-version: http.version
# All log UIDs
cert_chain_fuids: log.id.cert_chain_fuids
client_cert_chain_fuids: log.id.client_cert_chain_fuids
client_cert_fuid: log.id.client_cert_fuid
conn_uids: log.id.conn_uids
fid: log.id.fid
fuid: log.id.fuid
fuids: log.id.fuids
id: log.id.id
orig_fuids: log.id.orig_fuids
parent_fuid: log.id.parent_fuid
related_fuids: log.id.related_fuids
resp_fuids: log.id.resp_fuids
server_cert_fuid: log.id.server_cert_fuid
tunnel_parents: log.id.tunnel_parents
uid: log.id.uid
uids: log.id.uids
uuid: log.id.uuid
# Overlapping fields/mappings (aka: shared fields)
action:
#- smb.action
- '*.action'
#service=smb_files: smb.action
#service=mqtt: mqtt.action
#service=tunnel: tunnel.action
addl:
#- weird.addl
- '*.addl'
#service=dns: dns.addl
#service=weird: weird.addl
analyzer:
#- dpd.analyzer
- '*.analyzer'
#service=dpd: dpd.analyzer
#service=files: files.analyzer
arg:
#- ftp.arg
- '*.arg'
#service=ftp: ftp.arg
#service=ftp: pop3.arg
#service=msqyl: mysql.arg
#auth:
#service=rfb: rfb.auth #RFB does not exist in newer logs, so skipping to cover dns.auth
cipher:
#- kerberos.cipher
- '*.cipher'
#service=kerberos: kerberos.cipher
#service=ssl: tls.cipher
client:
#- ssh.client
- '*.client'
#service=kerberos: kerberos.client
#service=ssh: ssh.client
command:
#- ftp.command
- '*.command'
#service=pop3: pop3.command
#service=ftp: ftp.command
#service=irc: irc.command
date:
#- smtp.date
- '*.date'
#service=sip: sip.date
#service=smtp: smtp.date
duration:
- event.duration
#- '*.duration'
#service=conn: event.duration
#service=files: files.duration
#service=snmp: event.duration
from:
#- smtp.from
- '*.from'
#service=kerberos: kerberos.from
#service=smtp: smtp.from
is_orig:
- '*.is_orig'
#service=file: file.is_orig
#service=pop3: pop3.is_orig
local_orig:
- '*.local_orig'
#service=conn: conn.local_orig
#service=files: file.local_orig
method:
- http.request.method
#service=http: http.request.method
#service=sip: sip.method
msg:
- notice.msg
#service=notice: notice.msg
#service=pop3: pop3.msg
name:
- file.name
#- '*.name'
#service=smb_files: file.name
#service=software: software.name
#service=weird: weird.name
path:
- file.path
#- '*.path'
#service=smb_files: file.path
#service=smb_mapping: file.path
#service=smtp: smtp.path
reply_msg:
#- ftp.reply_msg
- '*.reply_msg'
#service=ftp: ftp.reply_msg
#service=radius: radius.reply_msg
reply_to:
#- smtp.reply_to
- '*.reply_to'
#service=sip: sip.reply_to
#service=smtp: smtp.reply_to
response_body_len:
- http.response.body.bytes
#service=http: http.response.body.bytes
#service=sip: sip.response_body_len
request_body_len:
- http.request.body.bytes
#service=http: http.request.body.bytes
#service=sip: sip.request_body_len
service:
#- kerberos.service
- '*.service'
#service=kerberos: kerberos.service
#service=smb_mapping: smb.service
status:
#- socks.status
- '*.status'
#service=pop3: pop3.status
#service=mqtt: mqtt.status
#service=socks: socks.status
status_code:
- 'http.response.status_code'
#service=http: http.response.status_code
#service=sip: sip.status_code
status_msg:
- http.status_msg
#- '*.status_msg'
#service=http: http.status_msg
#service=sip: sip.status_msg
subject:
#- smtp.subject
- '*.subject'
#service=known_certs: known_certs.subject
#service=sip: sip.subject
#service=smtp: smtp.subject
#service=ssl: tls.subject
trans_depth:
#- http.trans_depth
- '*.trans_depth'
#service=http: http.trans_depth
#service=sip: sip.trans_depth
#service=smtp: smtp.trans_depth
version:
#- tls.version
- '*.version'
#service=gquic: gquic.version
#service=ntp: ntp.version
#service=socks: socks.version
#service=snmp: snmp.version
#service=ssh: ssh.version
#service=tls: tls.version
# Conn and Conn Long
cache_add_rx_ev: conn.cache_add_rx_ev
cache_add_rx_mpg: conn.cache_add_rx_mpg
cache_add_rx_new: conn.cache_add_rx_new
cache_add_tx_ev: conn.cache_add_tx_ev
cache_add_tx_mpg: conn.cache_add_tx_mpg
cache_del_mpg: conn.cache_del_mpg
cache_entries: conn.cache_entries
conn_state: conn.conn_state
corelight_shunted: conn.corelight_shunted
history: conn.history
id.orig_h.name_src: conn.id.orig_h_name.src
id.orig_h.names_vals: conn.id.orig_h_names.vals
id.resp_h.name_src: conn.id.resp_h_name.src
id.resp_h.name_vals: conn.id.resp_h_name.vals
#local_orig: conn.local_orig
local_resp: conn.local_resp
missed_bytes: conn.missed_bytes
orig_bytes: source.bytes
orig_cc: source.geo.country_iso_code
orig_ip_bytes: conn.orig_ip_bytes
orig_l2_addr: source.mac
orig_pkts: source.packets
resp_bytes: destination.bytes
resp_cc: destination.geo.country_iso_code
resp_ip_bytes: conn.resp_ip_bytes
resp_l2_addr: destination.mac
resp_pkts: destination.packets
# DCE-RPC Specific
endpoint: dce_rpc.endpoint
named_pipe: dce_rpc.named_pipe
operation: dce_rpc.operation
#rtt: dce_rpc.rtt
# DHCP
domain: source.domain
host_name: source.hostname
lease_time: dhcp.lease_time
agent_remote_id: dhcp.agent_remote_id
assigned_addr: dhcp.assigned_addr
circuit_id: dhcp.circuit_id
client_message: dhcp.client_message
client_software: dhcp.client_software
client_fqdn: source.fqdn
#mac: source.mac
msg_orig: dhcp.msg_orig
msg_types: dhcp.msg_types
requested_addr: dhcp.requested_addr
server_addr: destination.ip
server_message: dhcp.server_message
server_software: dhcp.server_software
subscriber_id: dhcp.subscriber_id
# DNS
AA: dns.AA
#addl: dns.addl
auth: dns.auth
answers: dns.answers.name
TTLs: dns.answers.ttl
RA: dns.RA
RD: dns.RD
rejected: dns.rejected
TC: dns.TC
Z: dns.Z
qclass: dns.qclass
qclass_name: dns.question.class
qtype: dns.qtype
qtype_name: dns.question.type
query: dns.question.name
rcode_name: dns.response_code
rcode: dns.rcode
#rtt: dns.rtt
trans_id: dns.id
# DNP3
fc_reply: dnp3.fc_reply
fc_request: dnp3.fc_request
iin: dnp3.iin
# DPD
#analyzer: dpd.analyzer
failure_reason: dpd.failure_reason
packet_segment: dpd.packet_segment
# Files
rx_hosts: source.ip
tx_hosts: destination.ip
#analyzer: files.analyzer
depth: files.depth
#duration: files.duration
extracted: files.extracted
extracted_cutoff: files.extracted_cutoff
extracted_size: files.extracted_size
entropy: files.entropy
md5: file.hash.md5
sha1: file.hash.sha1
sha256: file.hash.sha256
#is_orig: file.is_orig
#local_orig: files.local_orig
missing_bytes: files.missing_bytes
filename: file.name
overflow_bytes: files.overflow_bytes
seen_bytes: files.seen_bytes
source: network.protocol
total_bytes: file.size
timedout: files.timedout
# GQUIC/QUIC
cyu: gquic.cyu
cyutags: gquic.cyutags
#server_name: destination.domain
tag_count: gquic.tag_count
#user_agent: user_agent.original
#version: gquic.version
# FTP
#arg: ftp.arg
#command: ftp.command
cwd: ftp.cwd
data_channel.orig_h: ftp.data_channel.orig_h
data_channel.passive: ftp.data_channel.passive
data_channel.resp_h: ftp.data_channel.resp_h
data_channel.resp_p: ftp.data_channel.resp_p
passive: ftp.passive
file_size: file.size
#mime_type: file.mime_type
#password: ftp.password
reply_code: ftp.reply_code
#reply_msg: ftp.reply_msg
#user: source.user.name
# HTTP
client_header_names: http.client_header_names
cookie_vars: http.cookie_vars
flash_version: http.flash_version
info_code: http.info_code
info_msg: http.info_msg
#method: http.request.method
omniture: http.omniture
orig_filenames: http.orig_filenames
orig_mime_types: http.orig_mime_types
origin: http.origin
#password: source.user.password
#response_body_len: http.response.body.bytes
#request_body_len: http.request.body.bytes
referrer: http.request.referrer
post_body: http.post_body
proxied: http.proxied
resp_filenames: http.resp_filenames
resp_mime_types: http.resp_mime_types
server_header_names: http.server_header_names
#status_code: http.response.status_code
#status_msg: http.status_msg
#trans_depth: http.trans_depth
uri_vars: http.uri_vars
#user_agent: user_agent.original
#username: source.user.name
# Intel
file_mime_type: file.mime_type
file_desc: intel.file_desc
#host: host.ip
matched: intel.matched
indicator: intel.seen.indicator
indicator_type: intel.seen.indicator_type
node: intel.seen.node
where: intel.seen.where
sources: intel.seen.sources
# IRC
dcc_file_name: file.name
dcc_file_size: file.size
dcc_mime_type: file.mime_type
#command: irc.command
nick: irc.nick
#user: source.user.name
value: irc.command
# Kerberos
auth_ticket: kerberos.auth_ticket
#cipher: kerberos.cipher
#client: kerberos.client
client_cert_subject: kerberos.client_cert_subject
error_code: kerberos.error_code
error_msg: kerberos.error_msg
#from: kerberos.from
forwardable: kerberos.forwardable
new_ticket: kerberos.new_ticket
renewable: kerberos.renewable
request_type: kerberos.request_type
server_cert_subject: kerberos.server_cert_subject
#service: kerberos.service
#success: event.outcome
till: kerberos.till
# Known_Certs
#host: host.ip
issuer_subject: known_certs.issuer_subject
#port_num: labels.known.port
serial: known_certs.serial
#subject: known_certs.subject
# Known_Modbus
#host: host.ip
device_type: known_modbus.device_type
# Known_Services
port_proto: network.transport
#port_num: labels.known.port
# Modbus All
delta: modbus.delta
new_val: modbus.new_val
old_val: modbus.old_val
register: modbus.register
# Modbus
func: modbus.func
exception: modbus.exception
track_address: modbus.track_address
# ModBus_Register_Change
#delta: modbus.delta
#new_val: modbus.new_val
#old_val: modbus.old_val
#register: modbus.register
# MQTT_Connect , MQTT_Publish, MQTT_Subscribe
ack: mqtt.ack
#action: mqtt.action
client_id: mqtt.client_id
connect_status: mqtt.connect_status
from_client: mqtt.from_client
granted_qos_level: mqtt.granted_qos_level
payload: mqtt.payload
payload_len: mqtt.payload_len
proto_name: mqtt.proto_name
proto_version: mqtt.proto_version
qos: mqtt.qos
qos_levels: mqtt.qos_levels
retain: mqtt.retain
#status: mqtt.status
topic: mqtt.topic
topics: mqtt.topics
will_payload: mqtt.will_payload
will_topic: mqtt.will_topic
# MYSQL
#arg: mysql.arg
cmd: mysql.command
response: mysql.response
rows: mysql.rows
#success: event.outcome
# Notice
actions: notice.actions
dropped: notice.dropped
#dst: destination.ip
email_body_sections: notice.email_body_sections
email_delay_tokens: notice.email_delay_tokens
identifier: notice.identifier
#msg: notice.msg
n: notice.n
note: notice.note
p: destination.port
peer_descr: notice.peer_descr
peer_name: notice.peer_name
#proto: network.transport
#src: source.ip
sub: notice.sub
suppress_for: notice.suppress_for
# NTLM
domainname: ntlm.domainname
hostname: ntlm.hostname
#username: source.user.name
server_nb_computer_name: ntlm.server_nb_computer_name
server_tree_name: ntlm.server_tree_name
#success: event.outcome
server_dns_computer_name: ntlm.server_dns_computer_name
# NTP
mode: ntp.mode
num_exts: ntp.num_exts
org_time: ntp.org_time
poll: ntp.poll
precision: ntp.precision
rec_time: ntp.rec_time
ref_id: ntp.ref_id
ref_time: ntp.ref_time
root_delay: ntp.root_delay
root_disp: ntp.root_disp
stratum: ntp.stratum
#version: ntp.version
xmt_time: ntp.xmt_time
# OCSP
certStatus: oscp.certStatus
hashAlgorithm: oscp.hashAlgorithm
issuerKeyHash: oscp.issuerKeyHash
issuerNameHash: oscp.issuerNameHash
nextUpdate: oscp.nextUpdate
revokereason: oscp.revokereason
revoketime: oscp.revoketime
serialNumber: oscp.serialNumber
thisUpdate: oscp.thisUpdate
# PE
compile_ts: pe.compile_ts
has_cert_table: pe.has_cert_table
has_debug_data: pe.has_debug_data
has_import_table: pe.has_import_table
has_export_table: pe.has_export_table
is_64bit: pe.is_64bit
is_exe: pe.is_exe
machine: pe.machine
os: pe.os
section_names: pe.section_names
subsystem: pe.subsystem
uses_aslr: pe.uses_aslr
uses_code_integrity: pe.uses_code_integrity
uses_dep: pe.uses_dep
uses_seh: pe.uses_seh
# POP3
#arg: pop3.arg
#command: pop3.command
current_request: pop3.current_request
current_response: pop3.current_response
data: pop3.data
failed_commands: pop3.failed_commands
has_client_activity: pop3.has_client_activity
#is_orig: pop3.is_orig
#msg: pop3.msg
#password: source.user.password
pending: pop3.pending
#status: pop3.status
successful_commands: pop3.successful_commands
#username: source.user.name
# Radius
connect_info: radius.connect_info
framed_addr: radius.framed_addr
#mac: source.mac
#reply_msg: radius.reply_msg
#result: event.outcome
ttl: event.duration
tunnel_client: radius.tunnel_client
#username: source.user.name
# RDP
cert_count: rdp.cert_count
cert_permanent: rdp.cert_permanent
cert_type: rdp.cert_type
client_build: rdp.client_build
client_dig_product_id: rdp.client_dig_product_id
client_name: source.hostname
cookie: rdp.cookie
desktop_height: rdp.desktop_height
desktop_width: rdp.desktop_width
encryption_level: rdp.encryption_level
encryption_method: rdp.encryption_method
keyboard_layout: rdp.keyboard_layout
requested_color_depth: rdp.requested_color_depth
#result: event.outcome
security_protocol: rdp.security_protocol
ssl: rdp.ssl
# RFB
#auth: event.outcome
authentication_method: rfb.authentication_method
client_major_version: rfb.client_major_version
client_minor_version: rfb.client_minor_version
desktop_name: destination.hostname
height: rfb.height
server_major_version: rfb.server_major_version
server_minor_version: rfb.server_minor_version
share_flag: rfb.share_flag
width: rfb.width
# SIP
call_id: sip.call_id
content_type: sip.content_type
#date: sip.date
#method: sip.method
#reply_to: sip.reply_to
#request_body_len: sip.request_body_len
request_from: sip.request_from
request_path: sip.request_path
request_to: sip.request_to
#response_body_len: sip.response_body_len
response_from: sip.response_from
response_path: sip.response_path
response_to: sip.response_to
seq: sip.seq
#status_code: sip.status_code
#status_msg: sip.status_msg
#subject: sip.subject
#trans_depth: sip.trans_depth
#uri: url.original
warning: sip.warning
#user_agent: user_agent.original
# SMB_Files
#action: smb.action
#name: file.name
#path: file.path
prev_name: smb.prev_name
size: file.size
times_accessed: file.accessed
times_changed: file.ctime
times_created: file.created
times_modified: file.mtime
# SMB_Mapping
native_file_system: smb.native_file_system
#path: file.path
share_type: smb.share_type
#service: smb.service
# SMTP
cc: smtp.cc
#date: smtp.date
first_received: smtp.first_received
#from: smtp.from
helo: smtp.helo
in_reply_to: smtp.in_reply_to
is_webmail: smtp.is_webmail
last_reply: smtp.last_reply
mailfrom: smtp.mailfrom
msg_id: smtp.msg_id
#path: smtp.path
rcptto: smtp.rcptto
#reply_to: smtp.reply_to
second_received: smtp.second_received
#subject: smtp.subject
tls: smtp.tls
to: smtp.to
#trans_depth: smtp.trans_depth
x_originating_ip: smtp.x_originating_ip
#user_agent: user_agent.original
# SMTP_Links
#cs-host: url.domain
#c-uri: url.original
# SNMP
#duration: event.duration
community: snmp.community
display_string: snmp.display_string
get_bulk_requests: snmp.get_bulk_requests
get_requests: snmp.get_requests
set_requests: snmp.set_requests
up_since: snmp.up_since
#version: snmp.version
# Socks
#password: source.user.password
bound_host: socks.bound_host
bound_name: socks.bound_name
bound_p: socks.bound_p
request_host: socks.request_host
request_name: socks.request_name
request_p: socks.request_p
#status: socks.status
#version: socks.version
# Software
#host: host.ip
host_p: software.host_port
version.major: software.version.major
version.minor: software.version.minor
version.minor2: software.version.minor2
version.minor3: software.version.minor3
#name: software.name
unparsed_version: software.unparsed_version
software_type: software.software_type
#url: url.original
# SSH
auth_attempts: ssh.auth_attempts
auth_success: event.outcome
cipher_alg: ssh.cipher_alg
#client: ssh.client
compression_alg: ssh.compression_alg
cshka: ssh.cshka
direction: network.direction
hassh: ssh.hassh
hasshAlgorithms: ssh.hasshAlgorithms
hasshServer: ssh.hasshServer
hasshServerAlgorithms: ssh.hasshServerAlgorithms
hasshVersion: ssh.hasshVersion
host_key: ssh.host_key
host_key_alg: ssh.host_key_alg
kex_alg: ssh.kex_alg
mac_alg: ssh.mac_alg
server: ssh.server
#version: ssh.version
# SSL / TLS
#cipher: tls.cipher
client_issuer: tls.client.issuer
client_subject: tls.client.subject
curve: tls.curve
established: tls.established
issuer: tls.server.issuer
ja3: tls.client.ja3
ja3s: tls.client.ja3s
last_alert: ssl.last_alert
next_protocol: tls.next_protocol
notary: ssl.notary
ocsp_status: ssl.oscp_status
orig_certificate_sha1: tls.client.hash.sha1
resp_certificate_sha1: tls.server.hash.sha1
resumed: tls.resumed
#server_name: tls.client.server_name
#subject: tls.server.subject
valid_ct_logs: ssl.valid_ct_logs
valid_ct_operators: ssl.valid_ct_operators
valid_ct_operators_list: ssl.valid_ct_operators_list
validation_status: ssl.validation_status
#version: tls.version
version_num: ssl.version_num
# Syslog
facility: log.syslog.facility.name
severity: log.syslog.severity.name
message: syslog.message
# Traceroute
#proto: network.transport
#dst: destination.ip
#src: source.ip
# Tunnel
#action: tunnel.action
tunnel_type: tunnel.tunnel_type
# Weird
#addl: weird.addl
#name: weird.name
notice: weird.notice
peer: weird.peer
# X509
basic_constraints.ca: x509.certificate.basic_constraints_ca
basic_constraints.path_len: x509.certificate.basic_constraints_path_length
certificate.cn: x509.certificate.cn
certificate.curve: x509.certificate.curve
certificate.exponent: x509.certificate.exponent
certificate.issuer: x509.certificate.issuer
certificate.key_alg: x509.certificate.key_alg
certificate.key_length: x509.certificate.key_length
certificate.key_type: x509.certificate.key_type
certificate.not_valid_after: x509.certificate.not_valid_after
certificate.not_valid_before: x509.certificate.not_valid_before
certificate.serial: x509.certificate.serial
certificate.sig_alg: x509.certificate.sig_alg
certificate.subject: x509.certificate.subject
certificate.version: x509.certificate.version
logcert: x509.logcert
san.dns: x509.san.dns
san.email: x509.san.email
san.ip: x509.san.ip
san.uri: x509.san.url
# Temporary one off rule name fields
cs-uri: url.original
# destination.domain:
# destination.ip:
# destination.port:
# http.response.status_code
# http.request.body.content
# source.domain:
# source.ip:
# source.port:
agent.version: http.version
c-ip: source.ip
clientip: source.ip
clientIP: source.ip
dest_domain:
- destination.domain
- url.domain
dest_ip: destination.ip
dest_port: destination.port
#TODO:WhatShouldThisBe?==dest:
#TODO:WhatShouldThisBe?==destination:
#TODO:WhatShouldThisBe?==Destination:
destination.hostname:
- destination.domain
- url.domain
DestinationAddress:
DestinationHostname:
- destination.domain
- url.domain
DestinationIp: destination.ip
DestinationIP: destination.ip
DestinationPort: destination.port
dst-ip: destination.ip
dstip: destination.ip
dstport: destination.port
Host:
- destination.domain
- url.domain
#host:
# - destination.domain
# - url.domain
HostVersion: http.version
http_host:
- destination.domain
- url.domain
http_uri: url.original
http_url: url.original
http_user_agent: user_agent.original
http.request.url-query-params: url.original
HttpMethod: http.request.method
in_url: url.original
#parent_domain:
# - url.registered_domain
# - destination.registered_domain
post_url_parameter: url.original
Request Url: url.original
request_url: url.original
request_URL: url.original
RequestUrl: url.original
#response: http.response.status_code
resource.url: url.original
resource.URL: url.original
sc_status: http.response.status_code
sender_domain:
- destination.domain
- url.domain
service.response_code: http.response.status_code
SourceAddr:
- source.address
- source.ip
SourceAddress: source.ip
SourceIP: source.ip
SourceIp: source.ip
SourceNetworkAddress:
- source.address
- source.ip
SourcePort: source.port
srcip: source.ip
Status: http.response.status_code
#status: http.response.status_code
url: url.original
URL: url.original
url_query: url.original
url.query: url.original
uri_path: url.original
#user_agent: user_agent.original
user_agent.name: user_agent.original
user-agent: user_agent.original
User-Agent: user_agent.original
useragent: user_agent.original
UserAgent: user_agent.original
User Agent: user_agent.original
web_dest:
- url.domain
- destination.domain
web.dest:
- url.domain
- destination.domain
Web.dest:
- url.domain
- destination.domain
web.host:
- url.domain
- destination.domain
Web.host:
- url.domain
- destination.domain
web_method: http.request.method
Web_method: http.request.method
web.method: http.request.method
Web.method: http.request.method
web_src: source.ip
web_status: http.response.status_code
Web_status: http.response.status_code
web.status: http.response.status_code
Web.status: http.response.status_code
web_uri: url.original
web_url: url.original
@@ -0,0 +1,2 @@
defaultindex:
- filebeat-*
@@ -0,0 +1,2 @@
defaultindex:
- logstash-*
+3
View File
@@ -0,0 +1,3 @@
defaultindex:
- logstash-*
- filebeat-*
+15
View File
@@ -0,0 +1,15 @@
logsources:
apache:
category: webserver
index: logstash-apache-*
webapp-error:
category: application
index: logstash-apache_error-*
linux-auth:
product: linux
service: auth
index: logstash-auth-*
fieldmappings:
client_ip: clientip
url: request
defaultindex: logstash-*
+30
View File
@@ -0,0 +1,30 @@
logsources:
windows:
product: windows
index: logstash-windows-*
windows-application:
product: windows
service: application
conditions:
EventLog: Application
windows-security:
product: windows
service: security
conditions:
EventLog: Security
windows-sysmon:
product: windows
service: sysmon
conditions:
EventLog: Microsoft-Windows-Sysmon
windows-dns-server:
product: windows
service: dns-server
conditions:
EventLog: 'DNS Server'
windows-driver-framework:
product: windows
service: driver-framework
conditions:
source: 'Microsoft-Windows-DriverFrameworks-UserMode/Operational'
defaultindex: logstash-*
+95
View File
@@ -0,0 +1,95 @@
logsources:
windows:
product: windows
index: <winlogbeat-{now/d}>
windows-application:
product: windows
service: application
conditions:
log_name: Application
windows-security:
product: windows
service: security
conditions:
log_name: Security
windows-sysmon:
product: windows
service: sysmon
conditions:
log_name: 'Microsoft-Windows-Sysmon/Operational'
windows-dns-server:
product: windows
service: dns-server
conditions:
log_name: 'DNS Server'
windows-driver-framework:
product: windows
service: driver-framework
conditions:
source: 'Microsoft-Windows-DriverFrameworks-UserMode/Operational'
defaultindex: <winlogbeat-{now/d}>
# Extract all field names with yq:
# yq -r '.detection | del(.condition) | map(keys) | .[][]' $(find sigma/rules/windows -name '*.yml') | sort -u | grep -v ^EventID$ | sed 's/^\(.*\)/ \1: event_data.\1/g'
# Keep EventID! Clean up the list afterwards!
fieldmappings:
EventID: event_id
AccessMask: event_data.AccessMask
AccountName: event_data.AccountName
AllowedToDelegateTo: event_data.AllowedToDelegateTo
AttributeLDAPDisplayName: event_data.AttributeLDAPDisplayName
AuditPolicyChanges: event_data.AuditPolicyChanges
AuthenticationPackageName: event_data.AuthenticationPackageName
CallingProcessName: event_data.CallingProcessName
CallTrace: event_data.CallTrace
CommandLine: event_data.CommandLine
ComputerName: event_data.ComputerName
CurrentDirectory: event_data.CurrentDirectory
Description: event_data.Description
DestinationHostname: event_data.DestinationHostname
DestinationIp: event_data.DestinationIp
DestinationIsIpv6: event_data.DestinationIsIpv6
DestinationPort: event_data.DestinationPort
Details: event_data.Details
EngineVersion: event_data.EngineVersion
EventType: event_data.EventType
FailureCode: event_data.FailureCode
FileName: event_data.FileName
GrantedAccess: event_data.GrantedAccess
GroupName: event_data.GroupName
Hashes: event_data.Hashes
HiveName: event_data.HiveName
HostVersion: event_data.HostVersion
Image: event_data.Image
ImageLoaded: event_data.ImageLoaded
ImagePath: event_data.ImagePath
Imphash: event_data.Imphash
LogonProcessName: event_data.LogonProcessName
LogonType: event_data.LogonType
NewProcessName: event_data.NewProcessName
ObjectClass: event_data.ObjectClass
ObjectName: event_data.ObjectName
ObjectType: event_data.ObjectType
ObjectValueName: event_data.ObjectValueName
ParentCommandLine: event_data.ParentCommandLine
ParentImage: event_data.ParentImage
Path: event_data.Path
PipeName: event_data.PipeName
ProcessName: event_data.ProcessName
Properties: event_data.Properties
ServiceFileName: event_data.ServiceFileName
ServiceName: event_data.ServiceName
ShareName: event_data.ShareName
Signature: event_data.Signature
Source: event_data.Source
SourceImage: event_data.SourceImage
StartModule: event_data.StartModule
Status: event_data.Status
SubjectUserName: event_data.SubjectUserName
TargetFilename: event_data.TargetFilename
TargetImage: event_data.TargetImage
TargetObject: event_data.TargetObject
TicketEncryptionType: event_data.TicketEncryptionType
TicketOptions: event_data.TicketOptions
User: event_data.User
WorkstationName: event_data.WorkstationName
+94
View File
@@ -0,0 +1,94 @@
logsources:
windows:
product: windows
index: winlogbeat-*
windows-application:
product: windows
service: application
conditions:
log_name: Application
windows-security:
product: windows
service: security
conditions:
log_name: Security
windows-sysmon:
product: windows
service: sysmon
conditions:
log_name: 'Microsoft-Windows-Sysmon/Operational'
windows-dns-server:
product: windows
service: dns-server
conditions:
log_name: 'DNS Server'
windows-driver-framework:
product: windows
service: driver-framework
conditions:
source: 'Microsoft-Windows-DriverFrameworks-UserMode/Operational'
defaultindex: winlogbeat-*
# Extract all field names with yq:
# yq -r '.detection | del(.condition) | map(keys) | .[][]' $(find sigma/rules/windows -name '*.yml') | sort -u | grep -v ^EventID$ | sed 's/^\(.*\)/ \1: event_data.\1/g'
# Keep EventID! Clean up the list afterwards!
fieldmappings:
EventID: event_id
AccessMask: event_data.AccessMask
AccountName: event_data.AccountName
AllowedToDelegateTo: event_data.AllowedToDelegateTo
AttributeLDAPDisplayName: event_data.AttributeLDAPDisplayName
AuditPolicyChanges: event_data.AuditPolicyChanges
AuthenticationPackageName: event_data.AuthenticationPackageName
CallingProcessName: event_data.CallingProcessName
CallTrace: event_data.CallTrace
CommandLine: event_data.CommandLine
ComputerName: event_data.ComputerName
CurrentDirectory: event_data.CurrentDirectory
Description: event_data.Description
DestinationHostname: event_data.DestinationHostname
DestinationIp: event_data.DestinationIp
DestinationIsIpv6: event_data.DestinationIsIpv6
DestinationPort: event_data.DestinationPort
Details: event_data.Details
EngineVersion: event_data.EngineVersion
EventType: event_data.EventType
FailureCode: event_data.FailureCode
FileName: event_data.FileName
GrantedAccess: event_data.GrantedAccess
GroupName: event_data.GroupName
Hashes: event_data.Hashes
HiveName: event_data.HiveName
HostVersion: event_data.HostVersion
Image: event_data.Image
ImageLoaded: event_data.ImageLoaded
ImagePath: event_data.ImagePath
Imphash: event_data.Imphash
LogonProcessName: event_data.LogonProcessName
LogonType: event_data.LogonType
NewProcessName: event_data.NewProcessName
ObjectClass: event_data.ObjectClass
ObjectName: event_data.ObjectName
ObjectType: event_data.ObjectType
ObjectValueName: event_data.ObjectValueName
ParentCommandLine: event_data.ParentCommandLine
ParentImage: event_data.ParentImage
Path: event_data.Path
PipeName: event_data.PipeName
ProcessName: event_data.ProcessName
Properties: event_data.Properties
ServiceFileName: event_data.ServiceFileName
ServiceName: event_data.ServiceName
ShareName: event_data.ShareName
Signature: event_data.Signature
Source: event_data.Source
SourceImage: event_data.SourceImage
StartModule: event_data.StartModule
Status: event_data.Status
SubjectUserName: event_data.SubjectUserName
TargetFilename: event_data.TargetFilename
TargetImage: event_data.TargetImage
TargetObject: event_data.TargetObject
TicketEncryptionType: event_data.TicketEncryptionType
TicketOptions: event_data.TicketOptions
User: event_data.User
WorkstationName: event_data.WorkstationName
+468
View File
@@ -0,0 +1,468 @@
title: Zeek field mappings for default collection of JSON logs with no parsing/normalization done and sending into logstash-* index
order: 20
backends:
- es-qs
- es-dsl
- elasticsearch-rule
- kibana
- xpack-watcher
- elastalert
- elastalert-dsl
logsources:
zeek:
product: zeek
index: 'logstash*'
zeek-category-accounting:
category: accounting
rewrite:
product: zeek
service: syslog
zeek-category-firewall:
category: firewall
conditions:
'@stream': conn
zeek-category-dns:
category: dns
conditions:
'@stream': dns
zeek-category-proxy:
category: proxy
rewrite:
product: zeek
service: http
zeek-category-webserver:
category: webserver
conditions:
'@stream': http
rewrite:
product: zeek
service: http
zeek-conn:
product: zeek
service: conn
conditions:
'@stream': conn
zeek-conn_long:
product: zeek
service: conn_long
conditions:
'@stream': conn_long
zeek-dce_rpc:
product: zeek
service: dce_rpc
conditions:
'@stream': dce_rpc
zeek-dns:
product: zeek
service: dns
conditions:
'@stream': dns
zeek-dnp3:
product: zeek
service: dnp3
conditions:
'@stream': dnp3
zeek-dpd:
product: zeek
service: dpd
conditions:
'@stream': dpd
zeek-files:
product: zeek
service: files
conditions:
'@stream': files
zeek-ftp:
product: zeek
service: ftp
conditions:
'@stream': ftp
zeek-gquic:
product: zeek
service: gquic
conditions:
'@stream': gquic
zeek-http:
product: zeek
service: http
conditions:
'@stream': http
zeek-http2:
product: zeek
service: http2
conditions:
'@stream': http2
zeek-intel:
product: zeek
service: intel
conditions:
'@stream': intel
zeek-irc:
product: zeek
service: irc
conditions:
'@stream': irc
zeek-kerberos:
product: zeek
service: kerberos
conditions:
'@stream': kerberos
zeek-known_certs:
product: zeek
service: known_certs
conditions:
'@stream': known_certs
zeek-known_hosts:
product: zeek
service: known_hosts
conditions:
'@stream': known_hosts
zeek-known_modbus:
product: zeek
service: known_modbus
conditions:
'@stream': known_modbus
zeek-known_services:
product: zeek
service: known_services
conditions:
'@stream': known_services
zeek-modbus:
product: zeek
service: modbus
conditions:
'@stream': modbus
zeek-modbus_register_change:
product: zeek
service: modbus_register_change
conditions:
'@stream': modbus_register_change
zeek-mqtt_connect:
product: zeek
service: mqtt_connect
conditions:
'@stream': mqtt_connect
zeek-mqtt_publish:
product: zeek
service: mqtt_publish
conditions:
'@stream': mqtt_publish
zeek-mqtt_subscribe:
product: zeek
service: mqtt_subscribe
conditions:
'@stream': mqtt_subscribe
zeek-mysql:
product: zeek
service: mysql
conditions:
'@stream': mysql
zeek-notice:
product: zeek
service: notice
conditions:
'@stream': notice
zeek-ntlm:
product: zeek
service: ntlm
conditions:
'@stream': ntlm
zeek-ntp:
product: zeek
service: ntp
conditions:
'@stream': ntp
zeek-ocsp:
product: zeek
service: ocsp
conditions:
'@stream': ocsp
zeek-pe:
product: zeek
service: pe
conditions:
'@stream': pe
zeek-pop3:
product: zeek
service: pop3
conditions:
'@stream': pop3
zeek-radius:
product: zeek
service: radius
conditions:
'@stream': radius
zeek-rdp:
product: zeek
service: rdp
conditions:
'@stream': rdp
zeek-rfb:
product: zeek
service: rfb
conditions:
'@stream': rfb
zeek-sip:
product: zeek
service: sip
conditions:
'@stream': sip
zeek-smb_files:
product: zeek
service: smb_files
conditions:
'@stream': smb_files
zeek-smb_mapping:
product: zeek
service: smb_mapping
conditions:
'@stream': smb_mapping
zeek-smtp:
product: zeek
service: smtp
conditions:
'@stream': smtp
zeek-smtp_links:
product: zeek
service: smtp_links
conditions:
'@stream': smtp_links
zeek-snmp:
product: zeek
service: snmp
conditions:
'@stream': snmp
zeek-socks:
product: zeek
service: socks
conditions:
'@stream': socks
zeek-software:
product: zeek
service: software
conditions:
'@stream': software
zeek-ssh:
product: zeek
service: ssh
conditions:
'@stream': ssh
zeek-ssl:
product: zeek
service: ssl
conditions:
'@stream': ssl
zeek-tls: # In case people call it TLS even though orig log is called ssl
product: zeek
service: tls
conditions:
'@stream': ssl
zeek-syslog:
product: zeek
service: syslog
conditions:
'@stream': syslog
zeek-tunnel:
product: zeek
service: tunnel
conditions:
'@stream': tunnel
zeek-traceroute:
product: zeek
service: traceroute
conditions:
'@stream': traceroute
zeek-weird:
product: zeek
service: weird
conditions:
'@stream': weird
zeek-x509:
product: zeek
service: x509
conditions:
'@stream': x509
zeek-ip_search:
product: zeek
service: network
conditions:
'@stream':
- conn
- conn_long
- dce_rpc
- dhcp
- dnp3
- dns
- ftp
- gquic
- http
- irc
- kerberos
- modbus
- mqtt_connect
- mqtt_publish
- mqtt_subscribe
- mysql
- ntlm
- ntp
- radius
- rfb
- sip
- smb_files
- smb_mapping
- smtp
- smtp_links
- snmp
- socks
- ssh
- tls #SSL
- tunnel
- weird
defaultindex: 'logstash-*'
fieldmappings:
# All Logs Applied Mapping & Taxonomy
dst_ip: id.resp_h
dst_port: id.resp_p
network_protocol: proto
src_ip: id.orig_h
src_port: id.orig_p
# DNS matching Taxonomy & DNS Category
answer: answers
#question_length: # Does not exist in open source version
record_type: qtype_name
#parent_domain: # Does not exist in open source version
# HTTP matching Taxonomy & Web/Proxy Category
cs-bytes: request_body_len
cs-cookie: cookie
r-dns: host
sc-bytes: response_body_len
sc-status: status_code
c-uri: uri
c-uri-extension: uri
c-uri-query: uri
c-uri-stem: uri
c-useragent: user_agent
cs-host: host
cs-method: method
cs-referrer: referrer
cs-version: version
# Temporary one off rule name fields
agent.version: version
c-cookie: cookie
c-ip: id.orig_h
cs-uri: uri
clientip: id.orig_h
clientIP: id.orig_h
dest_domain:
- query
- host
- server_name
dest_ip: id.resp_h
dest_port: id.resp_p
#TODO:WhatShouldThisBe?==dest:
#TODO:WhatShouldThisBe?==destination:
#TODO:WhatShouldThisBe?==Destination:
destination.hostname:
- query
- host
- server_name
DestinationAddress:
DestinationHostname:
- host
- query
- server_name
DestinationIp: id.resp_h
DestinationIP: id.resp_h
DestinationPort: id.resp_p
dst-ip: id.resp_h
dstip: id.resp_h
dstport: id.resp_p
Host:
- host
- query
- server_name
HostVersion: http.version
http_host:
- host
- query
- server_name
http_uri: uri
http_url: uri
http_user_agent: user_agent
http.request.url-query-params: uri
HttpMethod: method
in_url: uri
# parent_domain: # Not in open source zeek
post_url_parameter: uri
Request Url: uri
request_url: uri
request_URL: uri
RequestUrl: uri
#response: status_code
resource.url: uri
resource.URL: uri
sc_status: status_code
sender_domain:
- query
- server_name
service.response_code: status_code
source: id.orig_h
SourceAddr: id.orig_h
SourceAddress: id.orig_h
SourceIP: id.orig_h
SourceIp: id.orig_h
SourceNetworkAddress: id.orig_h
SourcePort: id.orig_p
srcip: id.orig_h
Status: status_code
status: status_code
url: uri
URL: uri
url_query: uri
url.query: uri
uri_path: uri
user_agent: user_agent
user_agent.name: user_agent
user-agent: user_agent
User-Agent: user_agent
useragent: user_agent
UserAgent: user_agent
User Agent: user_agent
web_dest:
- host
- query
- server_name
web.dest:
- host
- query
- server_name
Web.dest:
- host
- query
- server_name
web.host:
- host
- query
- server_name
Web.host:
- host
- query
- server_name
web_method: method
Web_method: method
web.method: method
Web.method: method
web_src: id.orig_h
web_status: status_code
Web_status: status_code
web.status: status_code
Web.status: status_code
web_uri: uri
web_url: uri
# Most are in ECS, but for things not using Elastic - these need renamed
destination.ip: id.resp_h
destination.port: id.resp_p
http.request.body.content: post_body
#source.domain:
source.ip: id.orig_h
source.port: id.orig_p
+97
View File
@@ -0,0 +1,97 @@
title: Humio log source conditions
order: 20
backends:
- humio
fieldmappings:
EventID: winlog.event_id
Event_ID: winlog.event_id
eventId: winlog.event_id
event_id: winlog.event_id
event-id: winlog.event_id
eventid: winlog.event_id
AccessMask: winlog.event_data.AccessMask
AccountName: winlog.event_data.AccountName
AllowedToDelegateTo: winlog.event_data.AllowedToDelegateTo
AttributeLDAPDisplayName: winlog.event_data.AttributeLDAPDisplayName
AuditPolicyChanges: winlog.event_data.AuditPolicyChanges
AuthenticationPackageName: winlog.event_data.AuthenticationPackageName
CallingProcessName: winlog.event_data.CallingProcessName
CallTrace: winlog.event_data.CallTrace
Channel: winlog.channel
CommandLine: winlog.event_data.CommandLine
ComputerName: winlog.ComputerName
CurrentDirectory: winlog.event_data.CurrentDirectory
Description: winlog.event_data.Description
DestinationHostname: winlog.event_data.DestinationHostname
DestinationIp: winlog.event_data.DestinationIp
dst_ip: winlog.event_data.DestinationIp
DestinationIsIpv6: winlog.event_data.DestinationIsIpv6
DestinationPort: winlog.event_data.DestinationPort
dst_port: winlog.event_data.DestinationPort
Details: winlog.event_data.Details
EngineVersion: winlog.event_data.EngineVersion
EventType: winlog.event_data.EventType
FailureCode: winlog.event_data.FailureCode
FileName: winlog.event_data.FileName
GrantedAccess: winlog.event_data.GrantedAccess
GroupName: winlog.event_data.GroupName
GroupSid: winlog.event_data.GroupSid
Hashes: winlog.event_data.Hashes
HiveName: winlog.event_data.HiveName
HostVersion: winlog.event_data.HostVersion
Image: winlog.event_data.Image
ImageLoaded: winlog.event_data.ImageLoaded
ImagePath: winlog.event_data.ImagePath
Imphash: winlog.event_data.Imphash
IpAddress: winlog.event_data.IpAddress
KeyLength: winlog.event_data.KeyLength
LogonProcessName: winlog.event_data.LogonProcessName
LogonType: winlog.event_data.LogonType
NewProcessName: winlog.event_data.NewProcessName
ObjectClass: winlog.event_data.ObjectClass
ObjectName: winlog.event_data.ObjectName
ObjectType: winlog.event_data.ObjectType
ObjectValueName: winlog.event_data.ObjectValueName
ParentCommandLine: winlog.event_data.ParentCommandLine
ParentProcessName: winlog.event_data.ParentProcessName
ParentImage: winlog.event_data.ParentImage
Path: winlog.event_data.Path
PipeName: winlog.event_data.PipeName
ProcessCommandLine: winlog.event_data.ProcessCommandLine
ProcessName: winlog.event_data.ProcessName
Properties: winlog.event_data.Properties
SecurityID: winlog.event_data.SecurityID
ServiceFileName: winlog.event_data.ServiceFileName
ServiceName: winlog.event_data.ServiceName
ShareName: winlog.event_data.ShareName
Signature: winlog.event_data.Signature
Source: winlog.event_data.Source
SourceImage: winlog.event_data.SourceImage
SourceIp: winlog.event_data.SourceIp
src_ip: winlog.event_data.SourceIp
StartModule: winlog.event_data.StartModule
Status: winlog.event_data.Status
SubjectUserName: winlog.event_data.SubjectUserName
SubjectUserSid: winlog.event_data.SubjectUserSid
TargetFilename: winlog.event_data.TargetFilename
Targetfilename: winlog.event_data.TargetFilename
TargetImage: winlog.event_data.TargetImage
TargetObject: winlog.event_data.TargetObject
TicketEncryptionType: winlog.event_data.TicketEncryptionType
TicketOptions: winlog.event_data.TicketOptions
User: winlog.event_data.User
WorkstationName: winlog.event_data.WorkstationName
# Channel: WLAN-Autoconfig AND EventID: 8001
AuthenticationAlgorithm: winlog.event_data.AuthenticationAlgorithm
BSSID: winlog.event_data.BSSID
BSSType: winlog.event_data.BSSType
CipherAlgorithm: winlog.event_data.CipherAlgorithm
ConnectionId: winlog.event_data.ConnectionId
ConnectionMode: winlog.event_data.ConnectionMode
InterfaceDescription: winlog.event_data.InterfaceDescription
InterfaceGuid: winlog.event_data.InterfaceGuid
OnexEnabled: winlog.event_data.OnexEnabled
PHYType: winlog.event_data.PHYType
ProfileName: winlog.event_data.ProfileName
SSID: winlog.event_data.SSID
+349
View File
@@ -0,0 +1,349 @@
title: Zeek field mappings for default collection of JSON logs with no parsing/normalization done and sending into logstash-* index
order: 20
backends:
- es-qs
- es-dsl
- elasticsearch-rule
- kibana
- xpack-watcher
- elastalert
- elastalert-dsl
logsources:
zeek:
product: zeek
index: 'logstash*'
zeek-category-accounting:
category: accounting
rewrite:
product: zeek
service: syslog
zeek-category-firewall:
category: firewall
conditions:
'@stream': conn
zeek-category-dns:
category: dns
conditions:
'@stream': dns
zeek-category-proxy:
category: proxy
rewrite:
product: zeek
service: http
zeek-category-webserver:
category: webserver
conditions:
'@stream': http
rewrite:
product: zeek
service: http
zeek-conn:
product: zeek
service: conn
conditions:
'@stream': conn
zeek-conn_long:
product: zeek
service: conn_long
conditions:
'@stream': conn_long
zeek-dce_rpc:
product: zeek
service: dce_rpc
conditions:
'@stream': dce_rpc
zeek-dns:
product: zeek
service: dns
conditions:
'@stream': dns
zeek-dnp3:
product: zeek
service: dnp3
conditions:
'@stream': dnp3
zeek-dpd:
product: zeek
service: dpd
conditions:
'@stream': dpd
zeek-files:
product: zeek
service: files
conditions:
'@stream': files
zeek-ftp:
product: zeek
service: ftp
conditions:
'@stream': ftp
zeek-gquic:
product: zeek
service: gquic
conditions:
'@stream': gquic
zeek-http:
product: zeek
service: http
conditions:
'@stream': http
zeek-http2:
product: zeek
service: http2
conditions:
'@stream': http2
zeek-intel:
product: zeek
service: intel
conditions:
'@stream': intel
zeek-irc:
product: zeek
service: irc
conditions:
'@stream': irc
zeek-kerberos:
product: zeek
service: kerberos
conditions:
'@stream': kerberos
zeek-known_certs:
product: zeek
service: known_certs
conditions:
'@stream': known_certs
zeek-known_hosts:
product: zeek
service: known_hosts
conditions:
'@stream': known_hosts
zeek-known_modbus:
product: zeek
service: known_modbus
conditions:
'@stream': known_modbus
zeek-known_services:
product: zeek
service: known_services
conditions:
'@stream': known_services
zeek-modbus:
product: zeek
service: modbus
conditions:
'@stream': modbus
zeek-modbus_register_change:
product: zeek
service: modbus_register_change
conditions:
'@stream': modbus_register_change
zeek-mqtt_connect:
product: zeek
service: mqtt_connect
conditions:
'@stream': mqtt_connect
zeek-mqtt_publish:
product: zeek
service: mqtt_publish
conditions:
'@stream': mqtt_publish
zeek-mqtt_subscribe:
product: zeek
service: mqtt_subscribe
conditions:
'@stream': mqtt_subscribe
zeek-mysql:
product: zeek
service: mysql
conditions:
'@stream': mysql
zeek-notice:
product: zeek
service: notice
conditions:
'@stream': notice
zeek-ntlm:
product: zeek
service: ntlm
conditions:
'@stream': ntlm
zeek-ntp:
product: zeek
service: ntp
conditions:
'@stream': ntp
zeek-ocsp:
product: zeek
service: ocsp
conditions:
'@stream': ocsp
zeek-pe:
product: zeek
service: pe
conditions:
'@stream': pe
zeek-pop3:
product: zeek
service: pop3
conditions:
'@stream': pop3
zeek-radius:
product: zeek
service: radius
conditions:
'@stream': radius
zeek-rdp:
product: zeek
service: rdp
conditions:
'@stream': rdp
zeek-rfb:
product: zeek
service: rfb
conditions:
'@stream': rfb
zeek-sip:
product: zeek
service: sip
conditions:
'@stream': sip
zeek-smb_files:
product: zeek
service: smb_files
conditions:
'@stream': smb_files
zeek-smb_mapping:
product: zeek
service: smb_mapping
conditions:
'@stream': smb_mapping
zeek-smtp:
product: zeek
service: smtp
conditions:
'@stream': smtp
zeek-smtp_links:
product: zeek
service: smtp_links
conditions:
'@stream': smtp_links
zeek-snmp:
product: zeek
service: snmp
conditions:
'@stream': snmp
zeek-socks:
product: zeek
service: socks
conditions:
'@stream': socks
zeek-software:
product: zeek
service: software
conditions:
'@stream': software
zeek-ssh:
product: zeek
service: ssh
conditions:
'@stream': ssh
zeek-ssl:
product: zeek
service: ssl
conditions:
'@stream': ssl
zeek-tls: # In case people call it TLS even though orig log is called ssl
product: zeek
service: tls
conditions:
'@stream': ssl
zeek-syslog:
product: zeek
service: syslog
conditions:
'@stream': syslog
zeek-tunnel:
product: zeek
service: tunnel
conditions:
'@stream': tunnel
zeek-traceroute:
product: zeek
service: traceroute
conditions:
'@stream': traceroute
zeek-weird:
product: zeek
service: weird
conditions:
'@stream': weird
zeek-x509:
product: zeek
service: x509
conditions:
'@stream': x509
zeek-ip_search:
product: zeek
service: network
conditions:
'@stream':
- conn
- conn_long
- dce_rpc
- dhcp
- dnp3
- dns
- ftp
- gquic
- http
- irc
- kerberos
- modbus
- mqtt_connect
- mqtt_publish
- mqtt_subscribe
- mysql
- ntlm
- ntp
- radius
- rfb
- sip
- smb_files
- smb_mapping
- smtp
- smtp_links
- snmp
- socks
- ssh
- tls #SSL
- tunnel
- weird
defaultindex: 'logstash-*'
fieldmappings:
# All Logs Applied Mapping & Taxonomy
clientip: id.orig_h
dst_ip: id.resp_h
dst_port: id.resp_p
network_protocol: proto
src_ip: id.orig_h
src_port: id.orig_p
# DNS matching Taxonomy & DNS Category
answer: answers
#question_length: # Does not exist in open source version
record_type: qtype_name
#parent_domain: # Does not exist in open source version
# HTTP matching Taxonomy & Web/Proxy Category
cs-bytes: request_body_len
cs-cookie: cookie
r-dns: host
sc-bytes: response_body_len
sc-status: status_code
c-uri: uri
c-uri-extension: uri
c-uri-query: uri
c-uri-stem: uri
c-useragent: user_agent
cs-host: host
cs-method: method
cs-referrer: referrer
cs-version: version
+62
View File
@@ -0,0 +1,62 @@
logsources:
windows-application:
product: windows
service: application
conditions:
LogName: 'Application'
windows-security:
product: windows
service: security
conditions:
LogName: 'Security'
windows-system:
product: windows
service: system
conditions:
LogName: 'System'
windows-sysmon:
product: windows
service: sysmon
conditions:
LogName: 'Microsoft-Windows-Sysmon/Operational'
windows-powershell:
product: windows
service: powershell
conditions:
LogName: 'Microsoft-Windows-PowerShell/Operational'
windows-classicpowershell:
product: windows
service: powershell-classic
conditions:
LogName: 'Windows PowerShell'
windows-taskscheduler:
product: windows
service: taskscheduler
conditions:
LogName: 'Microsoft-Windows-TaskScheduler/Operational'
windows-wmi:
product: windows
service: wmi
conditions:
LogName: 'Microsoft-Windows-WMI-Activity/Operational'
windows-dns-server:
product: windows
service: dns-server
category: dns
conditions:
LogName: 'DNS Server'
windows-dns-server-audit:
product: windows
service: dns-server-audit
conditions:
LogName: 'Microsoft-Windows-DNS-Server/Audit'
windows-driver-framework:
product: windows
service: driver-framework
conditions:
LogName: 'Microsoft-Windows-DriverFrameworks-UserMode/Operational'
windows-ntlm:
product: windows
service: ntlm
conditions:
LogName: 'Microsoft-Windows-NTLM/Operational'
+420 -3
View File
@@ -3,44 +3,461 @@ order: 20
backends:
- splunk
- splunkxml
- corelight_splunk
logsources:
zeek-category-accounting:
category: accounting
rewrite:
product: zeek
service: syslog
zeek-category-firewall:
category: firewall
conditions:
sourcetype: 'bro:conn:json'
zeek-category-dns:
category: dns
conditions:
sourcetype: 'bro:dns:json'
zeek-category-proxy:
category: proxy
rewrite:
product: zeek
service: http
zeek-category-webserver:
category: webserver
conditions:
sourcetype: 'bro:http:json'
rewrite:
product: zeek
service: http
zeek-conn:
product: zeek
service: conn
conditions:
sourcetype: 'bro:conn:json'
zeek-conn_long:
product: zeek
service: conn_long
conditions:
sourcetype: 'bro:conn_long:json'
zeek-dce_rpc:
product: zeek
service: dce_rpc
conditions:
sourcetype: 'bro:dce_rpc:json'
zeek-dns:
product: zeek
service: dns
conditions:
sourcetype: 'bro:dns:json'
zeek-dnp3:
product: zeek
service: dnp3
conditions:
sourcetype: 'bro:dnp3:json'
zeek-dpd:
product: zeek
service: dpd
conditions:
sourcetype: 'bro:dpd:json'
zeek-files:
product: zeek
service: files
conditions:
sourcetype: 'bro:files:json'
zeek-kerberos:
zeek-ftp:
product: zeek
service: kerberos
service: ftp
conditions:
sourcetype: 'bro:kerberos:json'
sourcetype: 'bro:ftp:json'
zeek-gquic:
product: zeek
service: gquic
conditions:
sourcetype: 'bro:gquic:json'
zeek-http:
product: zeek
service: http
conditions:
sourcetype: 'bro:http:json'
zeek-http2:
product: zeek
service: http2
conditions:
sourcetype: 'bro:http2:json'
zeek-intel:
product: zeek
service: intel
conditions:
sourcetype: 'bro:intel:json'
zeek-irc:
product: zeek
service: irc
conditions:
sourcetype: 'bro:irc:json'
zeek-kerberos:
product: zeek
service: kerberos
conditions:
sourcetype: 'bro:kerberos:json'
zeek-known_certs:
product: zeek
service: known_certs
conditions:
sourcetype: 'bro:known_certs:json'
zeek-known_hosts:
product: zeek
service: known_hosts
conditions:
sourcetype: 'bro:known_hosts:json'
zeek-known_modbus:
product: zeek
service: known_modbus
conditions:
sourcetype: 'bro:known_modbus:json'
zeek-known_services:
product: zeek
service: known_services
conditions:
sourcetype: 'bro:known_services:json'
zeek-modbus:
product: zeek
service: modbus
conditions:
sourcetype: 'bro:modbus:json'
zeek-modbus_register_change:
product: zeek
service: modbus_register_change
conditions:
sourcetype: 'bro:modbus_register_change:json'
zeek-mqtt_connect:
product: zeek
service: mqtt_connect
conditions:
sourcetype: 'bro:mqtt_connect:json'
zeek-mqtt_publish:
product: zeek
service: mqtt_publish
conditions:
sourcetype: 'bro:mqtt_publish:json'
zeek-mqtt_subscribe:
product: zeek
service: mqtt_subscribe
conditions:
sourcetype: 'bro:mqtt_subscribe:json'
zeek-mysql:
product: zeek
service: mysql
conditions:
sourcetype: 'bro:mysql:json'
zeek-notice:
product: zeek
service: notice
conditions:
sourcetype: 'bro:notice:json'
zeek-ntlm:
product: zeek
service: ntlm
conditions:
sourcetype: 'bro:ntlm:json'
zeek-ntp:
product: zeek
service: ntp
conditions:
sourcetype: 'bro:ntp:json'
zeek-ocsp:
product: zeek
service: ocsp
conditions:
sourcetype: 'bro:ocsp:json'
zeek-pe:
product: zeek
service: pe
conditions:
sourcetype: 'bro:pe:json'
zeek-pop3:
product: zeek
service: pop3
conditions:
sourcetype: 'bro:pop3:json'
zeek-radius:
product: zeek
service: radius
conditions:
sourcetype: 'bro:radius:json'
zeek-rdp:
product: zeek
service: rdp
conditions:
sourcetype: 'bro:rdp:json'
zeek-rfb:
product: zeek
service: rfb
conditions:
sourcetype: 'bro:rfb:json'
zeek-sip:
product: zeek
service: sip
conditions:
sourcetype: 'bro:sip:json'
zeek-smb_files:
product: zeek
service: smb_files
conditions:
sourcetype: 'bro:smb_files:json'
zeek-smb_mapping:
product: zeek
service: smb_mapping
conditions:
sourcetype: 'bro:smb_mapping:json'
zeek-smtp:
product: zeek
service: smtp
conditions:
sourcetype: 'bro:smtp:json'
zeek-smtp_links:
product: zeek
service: smtp_links
conditions:
sourcetype: 'bro:smtp_links:json'
zeek-snmp:
product: zeek
service: snmp
conditions:
sourcetype: 'bro:snmp:json'
zeek-socks:
product: zeek
service: socks
conditions:
sourcetype: 'bro:socks:json'
zeek-software:
product: zeek
service: software
conditions:
sourcetype: 'bro:software:json'
zeek-ssh:
product: zeek
service: ssh
conditions:
sourcetype: 'bro:ssh:json'
zeek-ssl:
product: zeek
service: ssl
conditions:
sourcetype: 'bro:ssl:json'
zeek-tls: # In case people call it TLS even though log is called ssl
product: zeek
service: tls
conditions:
sourcetype: 'bro:ssl:json'
zeek-syslog:
product: zeek
service: syslog
conditions:
sourcetype: 'bro:syslog:json'
zeek-tunnel:
product: zeek
service: tunnel
conditions:
sourcetype: 'bro:tunnel:json'
zeek-traceroute:
product: zeek
service: traceroute
conditions:
sourcetype: 'bro:traceroute:json'
zeek-weird:
product: zeek
service: weird
conditions:
sourcetype: 'bro:weird:json'
zeek-x509:
product: zeek
service: x509
conditions:
sourcetype: 'bro:x509:json'
zeek-ip_search:
product: zeek
service: network
conditions:
sourcetype:
- 'bro:conn:json'
- 'bro:conn_long:json'
- 'bro:dce_rpc:json'
- 'bro:dhcp:json'
- 'bro:dnp3:json'
- 'bro:dns:json'
- 'bro:ftp:json'
- 'bro:gquic:json'
- 'bro:http:json'
- 'bro:irc:json'
- 'bro:kerberos:json'
- 'bro:modbus:json'
- 'bro:mqtt_connect:json'
- 'bro:mqtt_publish:json'
- 'bro:mqtt_subscribe:json'
- 'bro:mysql:json'
- 'bro:ntlm:json'
- 'bro:ntp:json'
- 'bro:radius:json'
- 'bro:rfb:json'
- 'bro:sip:json'
- 'bro:smb_files:json'
- 'bro:smb_mapping:json'
- 'bro:smtp:json'
- 'bro:smtp_links:json'
- 'bro:snmp:json'
- 'bro:socks:json'
- 'bro:ssh:json'
- 'bro:ssl:json'
- 'bro:tunnel:json'
- 'bro:weird:json'
fieldmappings:
# All Logs Applied Mapping & Taxonomy
dst_ip: id.resp_h
dst_port: id.resp_p
network_protocol: proto
src_ip: id.orig_h
src_port: id.orig_p
# DNS matching Taxonomy & DNS Category
answer: answers
#question_length: # Does not exist in open source version
record_type: qtype_name
#parent_domain: # Does not exist in open source version
# HTTP matching Taxonomy & Web/Proxy Category
cs-bytes: request_body_len
cs-cookie: cookie
r-dns: host
sc-bytes: response_body_len
sc-status: status_code
c-uri: uri
c-uri-extension: uri
c-uri-query: uri
c-uri-stem: uri
c-useragent: user_agent
cs-host: host
cs-method: method
cs-referrer: referrer
cs-version: version
# Temporary one off rule name fields
agent.version: version
c-cookie: cookie
c-ip: id.orig_h
cs-uri: uri
clientip: id.orig_h
clientIP: id.orig_h
dest_domain:
- query
- host
- server_name
dest_ip: id.resp_h
dest_port: id.resp_p
#TODO:WhatShouldThisBe?==dest:
#TODO:WhatShouldThisBe?==destination:
#TODO:WhatShouldThisBe?==Destination:
destination.hostname:
- query
- host
- server_name
DestinationAddress:
DestinationHostname:
- host
- query
- server_name
DestinationIp: id.resp_h
DestinationIP: id.resp_h
DestinationPort: id.resp_p
dst-ip: id.resp_h
dstip: id.resp_h
dstport: id.resp_p
Host:
- host
- query
- server_name
HostVersion: http.version
http_host:
- host
- query
- server_name
http_uri: uri
http_url: uri
http_user_agent: user_agent
http.request.url-query-params: uri
HttpMethod: method
in_url: uri
# parent_domain: # Not in open source zeek
post_url_parameter: uri
Request Url: uri
request_url: uri
request_URL: uri
RequestUrl: uri
#response: status_code
resource.url: uri
resource.URL: uri
sc_status: status_code
sender_domain:
- query
- server_name
service.response_code: status_code
source: id.orig_h
SourceAddr: id.orig_h
SourceAddress: id.orig_h
SourceIP: id.orig_h
SourceIp: id.orig_h
SourceNetworkAddress: id.orig_h
SourcePort: id.orig_p
srcip: id.orig_h
Status: status_code
status: status_code
url: uri
URL: uri
url_query: uri
url.query: uri
uri_path: uri
user_agent: user_agent
user_agent.name: user_agent
user-agent: user_agent
User-Agent: user_agent
useragent: user_agent
UserAgent: user_agent
User Agent: user_agent
web_dest:
- host
- query
- server_name
web.dest:
- host
- query
- server_name
Web.dest:
- host
- query
- server_name
web.host:
- host
- query
- server_name
Web.host:
- host
- query
- server_name
web_method: method
Web_method: method
web.method: method
Web.method: method
web_src: id.orig_h
web_status: status_code
Web_status: status_code
web.status: status_code
Web.status: status_code
web_uri: uri
web_url: uri
# Most are in ECS, but for things not using Elastic - these need renamed
destination.ip: id.resp_h
destination.port: id.resp_p
http.request.body.content: post_body
source.domain:
- host
- query
- server_name
source.ip: id.orig_h
source.port: id.orig_p
+26 -4
View File
@@ -56,15 +56,17 @@ fieldmappings:
AuthenticationPackageName: winlog.event_data.AuthenticationPackageName
CallingProcessName: winlog.event_data.CallingProcessName
CallTrace: winlog.event_data.CallTrace
Channel: winlog.channel
CommandLine: process.args
ComputerName: winlog.computer_name
ContextInfo: winlog.event_data.ContextInfo
ComputerName: winlog.ComputerName
CurrentDirectory: process.working_directory
Description: winlog.event_data.Description
DestinationHostname: destination.domain
DestinationIp: destination.ip
dst_ip: destination.ip
#DestinationIsIpv6: winlog.event_data.DestinationIsIpv6 #=gets deleted and not boolean...https://github.com/elastic/beats/blob/71eee76e7cfb8d5b18dfacad64864370ddb14ce7/x-pack/winlogbeat/module/sysmon/config/winlogbeat-sysmon.js#L278-L279
DestinationPort: destination.port
dst_port: destination.port
DestinationPortName: network.protocol
Details: winlog.event_data.Details
EngineVersion: winlog.event_data.EngineVersion
@@ -72,9 +74,14 @@ fieldmappings:
FailureCode: winlog.event_data.FailureCode
FileName: file.path
GrantedAccess: winlog.event_data.GrantedAccess
GroupName: winlog.event_data.GroupName
GroupSid: winlog.event_data.GroupSid
GroupName:
- winlog.event_data.GroupName
- group.name
GroupSid:
- group.id
- winlog.event_data.GroupSid
Hashes: winlog.event_data.Hashes
file_hash: winlog.event_data.Hashes
HiveName: winlog.event_data.HiveName
HostVersion: winlog.event_data.HostVersion
Image: process.executable
@@ -109,7 +116,9 @@ fieldmappings:
SourceHostname: source.domain
SourceImage: process.executable
SourceIp: source.ip
src_ip: source.ip
SourcePort: source.port
src_port: source.port
#SourceIsIpv6: winlog.event_data.SourceIsIpv6 #=gets deleted and not boolean...https://github.com/elastic/beats/blob/71eee76e7cfb8d5b18dfacad64864370ddb14ce7/x-pack/winlogbeat/module/sysmon/config/winlogbeat-sysmon.js#L278-L279
StartModule: winlog.event_data.StartModule
Status: winlog.event_data.Status
@@ -126,3 +135,16 @@ fieldmappings:
TargetUserSid: user.id
User: user.name
WorkstationName: source.domain
# Channel: WLAN-Autoconfig AND EventID: 8001
AuthenticationAlgorithm: winlog.event_data.AuthenticationAlgorithm
BSSID: winlog.event_data.BSSID
BSSType: winlog.event_data.BSSType
CipherAlgorithm: winlog.event_data.CipherAlgorithm
ConnectionId: winlog.event_data.ConnectionId
ConnectionMode: winlog.event_data.ConnectionMode
InterfaceDescription: winlog.event_data.InterfaceDescription
InterfaceGuid: winlog.event_data.InterfaceGuid
OnexEnabled: winlog.event_data.OnexEnabled
PHYType: winlog.event_data.PHYType
ProfileName: winlog.event_data.ProfileName
SSID: winlog.event_data.SSID
+168 -57
View File
@@ -13,22 +13,49 @@
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re, json
import os
import sys
import re
import json
import xml.etree.ElementTree as xml
from ..config.mapping import (
from sigma.config.mapping import (
SimpleFieldMapping, MultiFieldMapping, ConditionalFieldMapping
)
from ..parser.condition import SigmaAggregationParser
from ..parser.exceptions import SigmaParseError
from ..parser.modifiers.type import SigmaRegularExpressionModifier
from .base import SingleTextQueryBackend
from sigma.parser.condition import SigmaAggregationParser
from sigma.parser.modifiers.type import SigmaRegularExpressionModifier
from sigma.backends.base import SingleTextQueryBackend
from sigma.parser.modifiers.base import SigmaTypeModifier
from sigma.parser.modifiers.transform import SigmaContainsModifier, SigmaStartswithModifier, SigmaEndswithModifier
from .data import sysmon_schema
from .exceptions import NotSupportedError
class AzureLogAnalyticsBackend(SingleTextQueryBackend):
class DeepFieldMappingMixin(object):
    """Mixin that resolves logsource-conditional Sigma field mappings.

    When a field mapping in the Sigma config is a ConditionalFieldMapping
    (e.g. ``Image`` mapping differently for ``service=security`` vs
    ``category=process_creation``), pick the target field that matches the
    rule's logsource before delegating to the normal mapping chain.
    """

    def fieldNameMapping(self, fieldname, value):
        if isinstance(fieldname, str):
            configured = self.sigmaconfig.fieldmappings.get(fieldname)
            # Strip value modifiers ("field|contains" etc.) and retry the lookup.
            if not configured and '|' in fieldname:
                fieldname = fieldname.split('|', 1)[0]
                configured = self.sigmaconfig.fieldmappings.get(fieldname)
            if isinstance(configured, ConditionalFieldMapping):
                conditions = self.sigmaconfig.fieldmappings.get(fieldname).conditions
                # Match the rule's logsource (product/service/category) against
                # the mapping's conditions; first hit wins.
                for source_key, source_value in self.logsource.items():
                    if conditions.get(source_key) and conditions.get(source_key, {}).get(source_value):
                        resolved = conditions.get(source_key, {}).get(source_value)
                        if any(resolved):
                            return super().fieldNameMapping(resolved[0], value)
        return super().fieldNameMapping(fieldname, value)

    def generate(self, sigmaparser):
        # Remember the rule's logsource so fieldNameMapping can consult it.
        self.logsource = sigmaparser.parsedyaml.get("logsource", {})
        return super().generate(sigmaparser)
class AzureLogAnalyticsBackend(DeepFieldMappingMixin, SingleTextQueryBackend):
"""Converts Sigma rule into Azure Log Analytics Queries."""
identifier = "ala"
active = True
@@ -43,8 +70,7 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
)
config_required = False
reEscape = re.compile('("|(?<!\\\\)\\\\(?![*?\\\\]))')
reClear = None
reEscape = re.compile('(\\\|"|(?<!)(?![*?]))')
andToken = " and "
orToken = " or "
notToken = "not "
@@ -57,13 +83,17 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
mapExpression = "%s == %s"
mapListsSpecialHandling = True
mapListValueExpression = "%s in %s"
_WIN_SECURITY_EVENT_MAP = {
"Image": "NewProcessName",
"ParentImage": "ParentProcessName",
"User": "SubjectUserName",
typedValueExpression = {
SigmaRegularExpressionModifier: "matches regex \"(?i)%s\"",
SigmaContainsModifier: "contains \"%s\""
}
# _WIN_SECURITY_EVENT_MAP = {
# "Image": "NewProcessName",
# "ParentImage": "ParentProcessName",
# "User": "SubjectUserName",
# }
def __init__(self, *args, **kwargs):
"""Initialize field mappings."""
super().__init__(*args, **kwargs)
@@ -77,10 +107,9 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
self._agg_var = None
self._has_logsource_event_cond = False
if not self.sysmon and not self.sigmaconfig.config:
self._field_map = self._WIN_SECURITY_EVENT_MAP
self._field_map = {}#self._WIN_SECURITY_EVENT_MAP
else:
self._field_map = {}
self.typedValueExpression[SigmaRegularExpressionModifier] = "matches regex \"%s\""
def id_mapping(self, src):
"""Identity mapping, source == target field name"""
@@ -105,27 +134,22 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
def default_value_mapping(self, val):
op = "=="
if isinstance(val, str):
if "*" in val[1:-1]: # value contains * inside string - use regex match
if "*" in val[1:-1]: # value contains * inside string - use regex match
op = "matches regex"
val = re.sub('([".^$]|\\\\(?![*?]))', '\\\\\g<1>', val)
val = re.sub('\\*', '.*', val)
if "\\" in val:
return "%s \"(?i)%s\"" % (op, val)
return "%s \"(?i)%s\"" % (op, val)
elif val.startswith("*") or val.endswith("*"):
op = "contains"
val = re.sub('([".^$]|(?![*?]))', '\g<1>', val)
val = re.sub('\\*', '', val)
val = re.sub('\\?', '.', val)
if "\\" in val:
return "%s @\"%s\"" % (op, val)
else: # value possibly only starts and/or ends with *, use prefix/postfix match
if val.endswith("*") and val.startswith("*"):
op = "contains"
val = self.cleanValue(val[1:-1])
elif val.endswith("*"):
op = "startswith"
val = self.cleanValue(val[:-1])
elif val.startswith("*"):
op = "endswith"
val = self.cleanValue(val[1:])
if "\\" in val:
return "%s @\"%s\"" % (op, val)
# if "\\" in val:
# return "%s @\"%s\"" % (op, val)
return "%s \"%s\"" % (op, val)
# elif "\\" in val:
# return "%s @\"%s\"" % (op, val)
return "%s \"%s\"" % (op, val)
def generate(self, sigmaparser):
@@ -140,13 +164,11 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
self.service = None
detection = sigmaparser.parsedyaml.get("detection", {})
is_parent_cmd = False
if "keywords" in detection.keys():
return super().generate(sigmaparser)
if self.category == "process_creation":
self.table = "SysmonEvent"
self.table = "SecurityEvent"
self.eventid = "1"
elif self.service == "security":
self.table = "SecurityEvent"
@@ -154,6 +176,12 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
self.table = "SysmonEvent"
elif self.service == "powershell":
self.table = "Event"
elif self.service == "office365":
self.table = "OfficeActivity"
elif self.service == "azuread":
self.table = "AuditLogs"
elif self.service == "azureactivity":
self.table = "AzureActivity"
else:
if self.service:
if "-" in self.service:
@@ -181,8 +209,8 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
elif self.sysmon:
parse_string = self.map_sysmon_schema(self.eventid)
before = "%s | parse EventData with * %s | where " % (self.table, parse_string)
elif self.category == "process_creation" and not self._has_logsource_event_cond:
before = "%s | where EventID == \"%s\" | where " % (self.table, self.eventid)
# elif self.category == "process_creation" and not self._has_logsource_event_cond:
# before = "%s | where EventID == \"%s\" | where " % (self.table, self.eventid)
else:
before = "%s | where " % self.table
return before
@@ -193,6 +221,7 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
and creates an appropriate table reference.
"""
key, value = node
key = self.fieldNameMapping(key, value)
if type(value) == list: # handle map items with values list like multiple OR-chained conditions
return "(" + self.generateORNode(
[(key, v) for v in value]
@@ -207,17 +236,26 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
self.table = "SecurityEvent"
elif self.service == "system":
self.table = "Event"
elif type(value) in (str, int): # default value processing
mapping = (key, self.default_value_mapping)
return self.mapExpression % (key, value)
elif type(value) in [SigmaTypeModifier, SigmaContainsModifier, SigmaRegularExpressionModifier, SigmaStartswithModifier, SigmaEndswithModifier]:
return self.generateMapItemTypedNode(key, value)
elif type(value) in (str, int): # default value processing'
#default_filters = ["endswith", "contains", "startswith", "re"]
# if any([item for item in default_filters if item in key]):
# key = re.sub(key, default_filters, "")
# return self.regexExpression % (key, self.cleanValue(value))
# else:
# value_mapping = self.default_value_mapping
value_mapping = self.default_value_mapping
mapping = (key, value_mapping)
if len(mapping) == 1:
mapping = mapping[0]
if type(mapping) == str:
return mapping
elif callable(mapping):
conds = mapping(key, value)
return self.generateSubexpressionNode(
self.generateANDNode(
[cond for cond in mapping(key, value)]
[cond for cond in mapping(key, self.cleanValue(value))]
)
)
elif len(mapping) == 2:
@@ -226,12 +264,29 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend):
if type(mapitem) == str:
result.append(mapitem)
elif callable(mapitem):
result.append(mapitem(val))
result.append(mapitem(self.cleanValue(val)))
return "{} {}".format(*result)
else:
raise TypeError("Backend does not support map values of type " + str(type(value)))
elif type(value) == list:
return self.generateMapItemListNode(key, value)
return super().generateMapItemNode(node)
elif value is None:
return self.nullExpression % (key, )
else:
raise TypeError("Backend does not support map values of type " + str(type(value)))
def generateMapItemTypedNode(self, fieldname, value):
    """Render a field/value pair whose value carries a Sigma type modifier.

    The typed value (e.g. a regex modifier) is converted by
    generateTypedValueNode and joined to the field name with a space.
    """
    typed_value = self.generateTypedValueNode(value)
    return "{} {}".format(fieldname, typed_value)
def generateTypedValueNode(self, node):
    """Convert a typed Sigma value into the backend's expression syntax.

    The value is stringified and any wildcard '*' is rewritten to its regex
    equivalent '.*' before being inserted into the per-type expression
    template. Raises NotImplementedError when no template exists for the
    value's type.
    """
    try:
        rendered = str(node)
        if "*" in rendered:
            rendered = rendered.replace("*", ".*")
        template = self.typedValueExpression[type(node)]
        return template % (rendered)
    except KeyError:
        raise NotImplementedError("Type modifier '{}' is not supported by backend".format(node.identifier))
def generateAggregation(self, agg):
if agg is None:
@@ -325,35 +380,89 @@ class AzureAPIBackend(AzureLogAnalyticsBackend):
def __init__(self, *args, **kwargs):
"""Initialize field mappings"""
super().__init__(*args, **kwargs)
self.techniques = self._load_mitre_file("techniques")
def create_rule(self, config):
tags = config.get("tags", [])
def find_technique(self, key_ids):
    """Yield loaded MITRE technique records whose id appears in key_ids.

    Duplicate and falsy ids are ignored; matching is exact on each loaded
    entry's 'technique_id' field.
    """
    for candidate_id in set(key_ids):
        if not candidate_id:
            continue
        for entry in self.techniques:
            if candidate_id == entry.get("technique_id", ""):
                yield entry
def _load_mitre_file(self, mitre_type):
try:
backend_dir = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", "config", "mitre"))
path = os.path.join(backend_dir, "{}.json".format(mitre_type))
with open(path) as config_file:
config = json.load(config_file)
return config
except (IOError, OSError) as e:
print("Failed to open {} configuration file '%s': %s".format(path, str(e)), file=sys.stderr)
return []
except json.JSONDecodeError as e:
print("Failed to parse {} configuration file '%s' as valid YAML: %s" % (path, str(e)), file=sys.stderr)
return []
def skip_tactics_or_techniques(self, src_technics, src_tactics):
    """Filter tactics/techniques down to mutually consistent pairs.

    A technique is kept only if one of the supplied tactics appears in its
    loaded 'tactic' list; a tactic is kept only if some supplied technique
    claims it. Returns (sorted tactics, sorted technique ids).
    """
    known = {entry["technique_id"]: entry for entry in self.find_technique(src_technics)}
    kept_tactics = set()
    kept_technics = set()
    for technique_id in src_technics:
        technique_tactics = known.get(technique_id, {}).get("tactic")
        if not technique_tactics:
            continue
        tactic_set = set(technique_tactics)
        for tactic in src_tactics:
            if tactic in tactic_set:
                kept_technics.add(technique_id)
                kept_tactics.add(tactic)
    return sorted(kept_tactics), sorted(kept_technics)
def parse_severity(self, old_severity):
    """Map Sigma's 'critical' level to 'high' (the Sentinel maximum); pass all other levels through unchanged."""
    return "high" if old_severity.lower() == "critical" else old_severity
def get_tactics_and_techniques(self, tags):
tactics = list()
technics = list()
for tag in tags:
tag = tag.replace("attack.", "")
if re.match("[tT][0-9]{4}", tag):
if re.match("[t][0-9]{4}", tag, re.IGNORECASE):
technics.append(tag.title())
else:
if "_" in tag:
tag_list = tag.split("_")
tag_list = [item.title() for item in tag_list]
tactics.append("".join(tag_list))
else:
tactics.append(tag.title())
tag = tag.replace("_", " ")
tag = tag.title()
tactics.append(tag)
return tactics, technics
def create_rule(self, config):
tags = config.get("tags", [])
tactics, technics = self.get_tactics_and_techniques(tags)
tactics, technics = self.skip_tactics_or_techniques(technics, tactics)
tactics = list(map(lambda s: s.replace(" ", ""), tactics))
rule = {
"displayName": "{} by {}".format(config.get("title"), config.get('author')),
"description": "{} {}".format(config.get("description"), "Technique: {}.".format(",".join(technics))),
"severity": config.get("level", "medium"),
"severity": self.parse_severity(config.get("level", "medium")),
"enabled": True,
"query": config.get("translation"),
"queryFrequency": "12H",
"queryPeriod": "12H",
"triggerOperator": "GreaterThan",
"triggerThreshold": 1,
"triggerThreshold": 0,
"suppressionDuration": "12H",
"suppressionEnabled": False,
"suppressionEnabled": True,
"tactics": tactics
}
return json.dumps(rule)
@@ -365,3 +474,5 @@ class AzureAPIBackend(AzureLogAnalyticsBackend):
configs.update({"translation": translation})
rule = self.create_rule(configs)
return rule
else:
raise NotSupportedError("No table could be determined from Sigma rule")
+3 -8
View File
@@ -151,9 +151,8 @@ class ArcSightBackend(SingleTextQueryBackend):
return "(" + self.orToken.join([self.generateNode(val) for val in new_value]) + ")"
return "(" + self.orToken.join([self.generateNode(val) for val in node]) + ")"
class ArcSightBackend(SingleTextQueryBackend):
"""Converts Sigma rule into ArcSight saved search. Contributed by SOC Prime. https://socprime.com"""
class ArcSightESMBackend(SingleTextQueryBackend):
"""Converts Sigma rule into ArcSight ESM saved search. Contributed by SOC Prime. https://socprime.com"""
reEscape = re.compile('(["\\\()])')
identifier = "arcsight-esm"
active = True
@@ -188,13 +187,11 @@ class ArcSightBackend(SingleTextQueryBackend):
def generateCleanValueNodeLogsource(self, value):
    """Render a logsource value: stringified, cleaned, then wrapped in the backend's value template."""
    cleaned = self.cleanValue(str(value))
    return self.valueExpression % cleaned
def CleanNode(self, node):
    """Split a mid-string wildcard pattern into substring-match segments.

    A string such as 'a*b' becomes ['*a*', '*b*'] so each segment is
    matched as a contains-style term; strings that already start or end
    with '*' (or contain no '*'), and non-strings, are returned unchanged.
    """
    has_inner_wildcard = (
        isinstance(node, str)
        and "*" in node
        and not node.startswith("*")
        and not node.endswith("*")
    )
    if has_inner_wildcard:
        return ["*{}*".format(segment) for segment in node.split("*") if segment]
    return node
#Clearing values from special characters.
def generateMapItemNode(self, node):
key, value = node
@@ -225,7 +222,7 @@ class ArcSightBackend(SingleTextQueryBackend):
elif isinstance(value, str) and value.endswith("*"):
return self.startsWithExpression % (key, self.generateValueNode(self.CleanNode(value)))
else:
return self.generateValueNode(value)
return self.mapExpression % (key, self.generateValueNode(value))
elif isinstance(value, list):
new_value = list()
for item in value:
@@ -245,8 +242,6 @@ class ArcSightBackend(SingleTextQueryBackend):
else:
raise TypeError("Backend does not support map values of type " + str(type(value)))
# for keywords values with space
def generateValueNode(self, node):
if type(node) is int:
+32
View File
@@ -19,6 +19,7 @@ import sys
import sigma
import yaml
from sigma.backends.exceptions import NotSupportedError
from .mixins import RulenameCommentMixin, QuoteCharMixin
from sigma.parser.modifiers.base import SigmaTypeModifier
@@ -306,3 +307,34 @@ class SingleTextQueryBackend(RulenameCommentMixin, BaseBackend, QuoteCharMixin):
transformed from the original name given in the Sigma rule.
"""
return fieldname
class CorelightQueryBackend:
    """Mixin that restricts rule generation to logsources Corelight/Zeek covers.

    Intended to sit ahead of a concrete backend in the MRO: super().generate()
    runs only when the rule's logsource matches a supported category, product
    or service; anything else is rejected with NotSupportedError.
    """

    def generate(self, sigmaparser):
        """Delegate to the next backend in the MRO for supported logsources,
        otherwise raise NotSupportedError describing the rejected logsource."""
        logsource = sigmaparser.parsedyaml.get("logsource")
        # Logsource values (per logsource key) that map onto Corelight/Zeek data.
        supported = {
            'category': [
                'proxy', 'firewall', 'webserver', 'accounting', 'dns'
            ],
            'product': [
                'zeek', 'apache', 'netflow', 'firewall'
            ],
            'service': [
                'radius', 'kerberos', 'pe', 'ntlm', 'sip', 'syslog', 'ntp',
                'mqtt_subscribe', 'smb_files', 'irc', 'http2', 'rfb',
                'tunnel', 'socks', 'mqtt_publish', 'network', 'weird',
                'known_certs', 'traceroute', 'modbus', 'smtp_links',
                'ssl', 'known_hosts', 'software', 'smtp', 'tls', 'intel',
                'ssh', 'dce_rpc', 'x509', 'known_services', 'http', 'files',
                'gquic', 'ftp', 'dns', 'conn', 'dnp3', 'rdp', 'dpd',
                'known_modbus', 'conn_long', 'modbus_register_change',
                'mqtt_connect', 'pop3', 'mysql', 'notice', 'snmp', 'smb_mapping'
            ],
        }
        for source_key, source_value in logsource.items():
            permitted_values = supported.get(source_key)
            if permitted_values and source_value.lower() in permitted_values:
                return super().generate(sigmaparser)
        described = ", ".join("%s: %s" % (key, logsource.get(key)) for key in logsource.keys())
        raise NotSupportedError("Corelight backend not supported logsources: %s." % described)
+84 -13
View File
@@ -1,5 +1,8 @@
import re
import requests
import json
import os
from sigma.config.eventdict import event
from fnmatch import fnmatch
from sigma.backends.base import SingleTextQueryBackend
@@ -45,7 +48,7 @@ class CarbonBlackQueryBackend(CarbonBlackWildcardHandlingMixin, SingleTextQueryB
active = True
#reEscape = re.compile("([\s+\\-=!(){}\\[\\]^\"~:/]|(?<!\\\\)\\\\(?![*?\\\\])|\\\\u|&&|\\|\\|)")
reEscape = re.compile("([\s\s+])")
reEscape = re.compile("([\s\s+()\"])")
reClear = re.compile("[<>]")
andToken = " AND "
orToken = " OR "
@@ -70,20 +73,46 @@ class CarbonBlackQueryBackend(CarbonBlackWildcardHandlingMixin, SingleTextQueryB
def cleanValue(self, val):
if("[1 to *]" in val):
self.reEscape = re.compile("([()])")
else:
self.reEscape = re.compile("([\s\s+()])")
val = val.strip()
val = super().cleanValue(val)
if isinstance(val, str):
if val.startswith("*"):
val = val.replace("*", "",1)
if val.startswith("\\"):
val = val.replace("\\", "", 1)
if val.startswith("*\\"):
val = val.replace("*\\", "*")
if val.startswith("*/"):
val = val.replace("*/", "*")
if val.startswith("*"):
val = val.replace("*", "")
if val.endswith("\\*"):
val = val.replace("\\*", "*")
if val.endswith("/*"):
val = val.replace("/*", "*")
val = val.strip()
return val
def cleanIPRange(self, value):
    """Convert a trailing-wildcard IPv4 pattern into CIDR notation.

    '10.1.*' -> '10.1.0.0/16', '10.*' -> '10.0.0.0/8'. Lists are converted
    element-wise; strings that do not end in '.*' (and non-str/non-list
    values) are returned unchanged.
    """
    if isinstance(value, str):
        if value.endswith('.*'):
            prefix = value[:-2]
            # Octets explicitly given before the wildcard (e.g. 2 for '10.1.*').
            known_octets = prefix.count('.') + 1
            base_ip = prefix + '.0' * (4 - known_octets)
            # Fixed: the prefix length is 8 bits per known octet. The original
            # computed 8 * (4 - n), yielding e.g. /24 for '10.*' and /8 for
            # '10.1.2.*' (only correct by coincidence for two known octets).
            return base_ip + '/' + str(8 * known_octets)
        return value
    if isinstance(value, list):
        return [self.cleanIPRange(item) for item in value]
    return value
def generateValueNode(self, node):
result = super().generateValueNode(node)
result = self.valueExpression % (str(node))
if result == "" or result.isspace():
return '""'
else:
@@ -94,16 +123,21 @@ class CarbonBlackQueryBackend(CarbonBlackWildcardHandlingMixin, SingleTextQueryB
def generateMapItemNode(self, node):
fieldname, value = node
if(fieldname == "EventID" and (type(value) is str or type(value) is int )):
fieldname = self.generateEventKey(value)
value = self.generateEventValue(value)
if fieldname.lower() in self.excluded_fields:
return
else:
transformed_fieldname = self.fieldNameMapping(fieldname, value)
if(transformed_fieldname == "ipaddr"):
value = self.cleanIPRange(value)
if self.mapListsSpecialHandling == False and type(value) in (str, int, list) or self.mapListsSpecialHandling == True and type(value) in (str, int):
#return self.mapExpression % (transformed_fieldname, self.generateNode(value))
if isinstance(value, list):
return self.generateNode([self.mapExpression % (transformed_fieldname, self.cleanValue(item)) for item in value])
elif isinstance(value, str) or isinstance(value, int):
return self.mapExpression % (transformed_fieldname, self.generateNode(value))
return self.mapExpression % (transformed_fieldname, self.generateNode(self.cleanValue(value)))
elif type(value) == list:
return self.generateMapItemListNode(transformed_fieldname, value)
elif isinstance(value, SigmaTypeModifier):
@@ -118,22 +152,59 @@ class CarbonBlackQueryBackend(CarbonBlackWildcardHandlingMixin, SingleTextQueryB
if expression:
return "(%s%s)" % (self.notToken, expression)
# Function to upload watchlists through CB API
def postAPI(self,result,title,desc):
    """Upload the translated query to Carbon Black as an 'events' watchlist.

    result -- the generated query string (becomes the watchlist search query)
    title  -- rule title, used as the watchlist name
    desc   -- rule description

    The endpoint URL is read from the 'cbapi_watchlist' environment variable
    and the API token from 'APIToken'; no value is returned — the server
    response text is printed for operator feedback.
    """
    url = os.getenv("cbapi_watchlist")
    body = {
        "name":title,
        "search_query":"q="+str(result),
        "description":desc,
        "index_type":"events"
    }
    header = {
        "X-Auth-Token": os.getenv("APIToken")
    }
    print(title)
    # NOTE(review): verify=False disables TLS certificate validation —
    # confirm this is intentional (e.g. a self-signed CB server certificate).
    x = requests.post(url, data =json.dumps(body), headers = header, verify=False)
    print(x.text)
def generateEventKey(self, value):
    """Return the Carbon Black field name mapped to a Windows EventID, or the generic 'eventid' field when unmapped."""
    return event[value][0] if value in event else 'eventid'
def generateEventValue(self, value):
    """Return the Carbon Black field value mapped to a Windows EventID, or an empty string when unmapped."""
    return event[value][1] if value in event else ''
def generate(self, sigmaparser):
"""Method is called for each sigma rule and receives the parsed rule (SigmaParser)"""
title = sigmaparser.parsedyaml["title"]
desc = sigmaparser.parsedyaml["description"]
try:
self.category = sigmaparser.parsedyaml['logsource'].setdefault('category', None)
self.counted = sigmaparser.parsedyaml.get('counted', None)
self.excluded_fields = [item.lower() for item in sigmaparser.config.config.get("excludedfields", [])]
except KeyError:
self.category = None
if self.category == "process_creation":
for parsed in sigmaparser.condparsed:
query = self.generateQuery(parsed)
result = ""
for parsed in sigmaparser.condparsed:
query = self.generateQuery(parsed)
result = ""
if query is not None:
result += query
return result
else:
raise NotSupportedError("Not supported logsource category.")
if query is not None:
result += query
# self.postAPI(result,title,desc)
return result
# if self.category == "process_creation":
# for parsed in sigmaparser.condparsed:
# query = self.generateQuery(parsed)
# result = ""
# if query is not None:
# result += query
# return result
# else:
# raise NotSupportedError("Not supported logsource category.")
+1 -1
View File
@@ -25,7 +25,7 @@ from sigma.tools import getAllSubclasses, getClassDict
def getBackendList():
"""Return list of backend classes"""
path = os.path.dirname(__file__)
return frozenset(getAllSubclasses(path, "backends", BaseBackend))
return getAllSubclasses(path, "backends", BaseBackend)
def getBackendDict():
return getClassDict(getBackendList())
+232 -44
View File
@@ -18,33 +18,92 @@ import json
import re
from fnmatch import fnmatch
import sys
import os
from random import randrange
import sigma
import yaml
from sigma.parser.modifiers.type import SigmaRegularExpressionModifier
from sigma.parser.condition import ConditionOR, ConditionAND, NodeSubexpression
from .base import BaseBackend, SingleTextQueryBackend
from sigma.config.mapping import ConditionalFieldMapping
from .base import BaseBackend, SingleTextQueryBackend, CorelightQueryBackend
from .mixins import RulenameCommentMixin, MultiRuleOutputMixin
from .exceptions import NotSupportedError
class DeepFieldMappingMixin(object):
    """Resolve conditional (logsource-dependent) field mappings before the
    backend's own field-name mapping runs.

    generate() caches the rule's logsource so fieldNameMapping() can pick
    the mapping variant matching the current logsource.
    """

    def fieldNameMapping(self, fieldname, value):
        # Look up the configured mapping; retry without value modifiers
        # ('field|contains' -> 'field') when the full name has no entry.
        if isinstance(fieldname, str):
            get_config = self.sigmaconfig.fieldmappings.get(fieldname)
            if not get_config and '|' in fieldname:
                fieldname = fieldname.split('|', 1)[0]
                get_config = self.sigmaconfig.fieldmappings.get(fieldname)
            if isinstance(get_config, ConditionalFieldMapping):
                # Pick the first mapping whose condition (e.g. service= or
                # category=) matches the cached logsource of the current rule.
                condition = self.sigmaconfig.fieldmappings.get(fieldname).conditions
                for key, item in self.logsource.items():
                    if condition.get(key) and condition.get(key, {}).get(item):
                        new_fieldname = condition.get(key, {}).get(item)
                        if any(new_fieldname):
                            return super().fieldNameMapping(new_fieldname[0], value)
        # Fall through to the backend's regular mapping (also the default
        # branch of a conditional mapping).
        return super().fieldNameMapping(fieldname, value)

    def generate(self, sigmaparser):
        # Cache the rule's logsource for use by fieldNameMapping above.
        self.logsource = sigmaparser.parsedyaml.get("logsource", {})
        return super().generate(sigmaparser)
class ElasticsearchWildcardHandlingMixin(object):
"""
Determine field mapping to keyword subfields depending on existence of wildcards in search values. Further,
provide configurability with backend parameters.
"""
options = SingleTextQueryBackend.options + (
("keyword_field", "keyword", "Keyword sub-field name", None),
("keyword_blacklist", None, "Fields that don't have a keyword subfield (wildcards * and ? allowed)", None)
("keyword_field", "keyword", "Keyword sub-field name (default is: '.keyword'). Set blank value if all keyword fields are the base(top-level) field. Additionally see 'keyword_base_fields' for more granular control of the base & subfield situation.", None),
("analyzed_sub_field_name", "", "Analyzed sub-field name. By default analyzed field is the base field. Therefore, use this option to make the analyzed field a subfield. An example value would be '.text' ", None),
("analyzed_sub_fields", None, "Fields that have an analyzed sub-field.", None),
("keyword_base_fields", None, "Fields that the keyword is base (top-level) field. By default analyzed field is the base field. So use this option to change that logic. Valid options are: list of fields, single field. Also, wildcards * and ? allowed.", None),
("keyword_whitelist", None, "Fields to always set as keyword. Bypasses case insensitive options. Valid options are: list of fields, single field. Also, wildcards * and ? allowed.", None),
("keyword_blacklist", None, "Fields to never set as keyword (ie: always set as analyzed field). Bypasses case insensitive options. Valid options are: list of fields, single field. Also, wildcards * and ? allowed.", None),
("case_insensitive_whitelist", None, "Fields to make the values case insensitive regex. Automatically sets the field as a keyword. Valid options are: list of fields, single field. Also, wildcards * and ? allowed.", None),
("case_insensitive_blacklist", None, "Fields to exclude from being made into case insensitive regex. Valid options are: list of fields, single field. Also, wildcards * and ? allowed.", None)
)
reContainsWildcard = re.compile("(?:(?<!\\\\)|\\\\\\\\)[*?]").search
uuid_regex = re.compile( "[0-9a-fA-F]{8}(\\\)?-[0-9a-fA-F]{4}(\\\)?-[0-9a-fA-F]{4}(\\\)?-[0-9a-fA-F]{4}(\\\)?-[0-9a-fA-F]{12}", re.IGNORECASE )
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.matchKeyword = True
self.CaseInSensitiveField = False
self.keyword_field = self.keyword_field.strip().strip('.') # Prevent mistake if user added a '.' or field has spaces
self.analyzed_sub_field_name = self.analyzed_sub_field_name.strip().strip('.') # Prevent mistake if user added a '.' or field has spaces
try:
self.blacklist = self.keyword_blacklist.split(",")
self.keyword_base_fields = self.keyword_base_fields.replace(' ','').split(',')
except AttributeError:
self.blacklist = list()
self.keyword_base_fields = list()
try:
self.analyzed_sub_fields = self.analyzed_sub_fields.replace(' ','').split(',')
except AttributeError:
self.analyzed_sub_fields = list()
try:
self.keyword_whitelist = self.keyword_whitelist.replace(' ','').split(',')
except AttributeError:
self.keyword_whitelist = list()
try:
self.keyword_blacklist = self.keyword_blacklist.replace(' ','').split(',')
except AttributeError:
self.keyword_blacklist = list()
try:
self.case_insensitive_whitelist = self.case_insensitive_whitelist.replace(' ','').split(',')
except AttributeError:
self.case_insensitive_whitelist = list()
try:
self.case_insensitive_blacklist = self.case_insensitive_blacklist.replace(' ','').split(',')
except AttributeError:
self.case_insensitive_blacklist = list()
def containsWildcard(self, value):
"""Determine if value contains wildcard."""
@@ -54,26 +113,125 @@ class ElasticsearchWildcardHandlingMixin(object):
else:
return False
def fieldNameMapping(self, fieldname, value):
"""
Determine if values contain wildcards. If yes, match on keyword field else on analyzed one.
Decide if field value should be quoted based on the field name decision and store it in object property.
"""
if self.keyword_field == '':
self.matchKeyword = True
return fieldname
def generateMapItemNode(self, node):
fieldname, value = node
if fieldname.lower().find("hash") != -1:
if isinstance(value, list):
res = []
for item in value:
res.extend([item.lower(), item.upper()])
value = res
elif isinstance(value, str):
value = [value.upper(), value.lower()]
transformed_fieldname = self.fieldNameMapping(fieldname, value)
if self.mapListsSpecialHandling == False and type(value) in (str, int, list) or self.mapListsSpecialHandling == True and type(value) in (str, int):
return self.mapExpression % (transformed_fieldname, self.generateNode(value))
elif type(value) == list:
return self.generateMapItemListNode(transformed_fieldname, value)
elif isinstance(value, SigmaTypeModifier):
return self.generateMapItemTypedNode(transformed_fieldname, value)
elif value is None:
return self.nullExpression % (transformed_fieldname, )
else:
raise TypeError("Backend does not support map values of type " + str(type(value)))
if not any([ fnmatch(fieldname, pattern) for pattern in self.blacklist ]) and (
type(value) == list and any(map(self.containsWildcard, value)) \
or self.containsWildcard(value)
):
def fieldNameMapping(self, fieldname, value, *agg_option):
"""
Decide whether to use a keyword field or analyzed field. Using options on fields to make into keywords OR not and the field naming of keyword.
Further, determine if values contain wildcards. Additionally, determine if case insensitive regex should be used. Finally,
if field value should be quoted based on the field name decision and store it in object property.
"""
force_keyword_whitelist = False # override everything AND set keyword and turn off case insensitivity
force_keyword_blacklist = False # override everything AND set analyzed field and turn off case insensitivity
force_keyword_type = False # make keyword
keyword_subfield_name = self.keyword_field
analyzed_subfield_name = self.analyzed_sub_field_name
# Set naming for keyword fields
if keyword_subfield_name == '':
force_keyword_type = True
elif len(self.keyword_base_fields) != 0 and any ([ fnmatch(fieldname, pattern) for pattern in self.keyword_base_fields ]):
keyword_subfield_name = ''
else:
keyword_subfield_name = '.%s'%keyword_subfield_name
# Set naming for analyzed fields
if analyzed_subfield_name != '' and not keyword_subfield_name.startswith('.'):
analyzed_subfield_name = '.%s'%analyzed_subfield_name
else:
analyzed_subfield_name = ''
# force keyword on agg_option used in Elasticsearch DSL query key
if agg_option:
force_keyword_type = True
# Only some analyzed subfield, so if not in this list then has to be keyword
if len(self.analyzed_sub_fields) != 0 and not any ([ fnmatch(fieldname, pattern) for pattern in self.analyzed_sub_fields ]):
force_keyword_type = True
# Keyword (force) exclude
if len(self.keyword_blacklist) != 0 and any ([ fnmatch(fieldname, pattern.strip()) for pattern in self.keyword_blacklist ]):
force_keyword_blacklist = True
# Keyword (force) include
elif len(self.keyword_whitelist) != 0 and any ([ fnmatch(fieldname, pattern.strip()) for pattern in self.keyword_whitelist ]):
force_keyword_whitelist = True
# Set case insensitive regex
if not (len( self.case_insensitive_blacklist ) != 0 and any([ fnmatch( fieldname, pattern ) for pattern in self.case_insensitive_blacklist ])) and len( self.case_insensitive_whitelist ) != 0 and any([ fnmatch( fieldname, pattern ) for pattern in self.case_insensitive_whitelist ]):
self.CaseInSensitiveField = True
else:
self.CaseInSensitiveField = False
# Set type and value
if force_keyword_blacklist:
self.matchKeyword = False
self.CaseInSensitiveField = False
elif force_keyword_whitelist:
self.matchKeyword = True
self.CaseInSensitiveField = False
elif force_keyword_type:
self.matchKeyword = True
elif self.CaseInSensitiveField:
self.matchKeyword = True
elif (type(value) == list and any(map(self.containsWildcard, value))) or self.containsWildcard(value):
self.matchKeyword = True
return fieldname + "." + self.keyword_field
else:
self.matchKeyword = False
return fieldname
class ElasticsearchQuerystringBackend(ElasticsearchWildcardHandlingMixin, SingleTextQueryBackend):
# Return compiled field name
if self.matchKeyword:
return '%s%s'%(fieldname, keyword_subfield_name)
else:
return '%s%s'%(fieldname, analyzed_subfield_name)
def makeCaseInSensitiveValue(self, value):
    """
    Return a dict {'is_regex': bool, 'value': str}.

    Converts the query value into a case-insensitive regular expression,
    e.g. 'http' becomes '[hH][tT][tT][pP]'. Conversion is applied only when
    the value contains letters (and is not a UUID, 'null', or already a
    '/regex/'), or contains a wildcard. The caller wraps the value in '/'
    regex delimiters when 'is_regex' is True.
    """
    if value and not value == 'null' and not re.match(r'^/.*/$', value) and (re.search('[a-zA-Z]', value) and not re.match(self.uuid_regex, value) or self.containsWildcard(value)): # re.search for alpha is fastest:
        # Turn single ending '\\' into non escaped (ie: '\\*')
        #value = re.sub( r"((?<!\\)(\\))\*$", "\g<1>\\*", value )
        # Make each letter match either case: 'a' -> '[Aa]'
        value = re.sub( r"[A-Za-z]", lambda x: "[" + x.group( 0 ).upper() + x.group( 0 ).lower() + "]", value )
        # Turn `*` into regex wildcard '.*', only when preceded by an even
        # number of '\' (an odd count means the '*' was already escaped)
        value = re.sub( r"(((?<!\\)(\\\\)+)|(?<!\\))\*", "\g<1>.*", value )
        # Escape additional values that are treated as specific "operators" within Elastic. (ie: @, ?, &, <, >, and ~)
        # reference: https://www.elastic.co/guide/en/elasticsearch/reference/current/regexp-syntax.html#regexp-optional-operators
        value = re.sub( r"(((?<!\\)(\\\\)+)|(?<!\\))([@?&~<>])", "\g<1>\\\\\g<4>", value )
        # Validate that the produced pattern actually compiles before use
        try:
            re.compile(value)
            return {'is_regex': True, 'value': value}
        # Regex failed
        except re.error:
            raise TypeError( "Regular expression validation error for: '%s')" %str(value) )
    else:
        return { 'is_regex': False, 'value': value }
class ElasticsearchQuerystringBackend(DeepFieldMappingMixin, ElasticsearchWildcardHandlingMixin, SingleTextQueryBackend):
"""Converts Sigma rule into Elasticsearch query string. Only searches, no aggregations."""
identifier = "es-qs"
active = True
@@ -101,6 +259,11 @@ class ElasticsearchQuerystringBackend(ElasticsearchWildcardHandlingMixin, Single
return '""'
else:
if self.matchKeyword: # don't quote search value on keyword field
if self.CaseInSensitiveField:
make_ci = self.makeCaseInSensitiveValue(result)
result = make_ci.get('value')
if make_ci.get('is_regex'): # Determine if still should be a regex
result = "/%s/" % result # Regex place holders for regex
return result
else:
return "\"%s\"" % result
@@ -133,7 +296,12 @@ class ElasticsearchQuerystringBackend(ElasticsearchWildcardHandlingMixin, Single
else:
return super().generateSubexpressionNode(node)
class ElasticsearchDSLBackend(RulenameCommentMixin, ElasticsearchWildcardHandlingMixin, BaseBackend):
class ElasticsearchCorelightBackend(CorelightQueryBackend, ElasticsearchQuerystringBackend):
    """Elasticsearch query-string backend restricted to Corelight/Zeek-supported logsources."""
    identifier = "corelight_es-qs"
class ElasticsearchDSLBackend(DeepFieldMappingMixin, RulenameCommentMixin, ElasticsearchWildcardHandlingMixin, BaseBackend):
"""ElasticSearch DSL backend"""
identifier = 'es-dsl'
active = True
@@ -280,12 +448,12 @@ class ElasticsearchDSLBackend(RulenameCommentMixin, ElasticsearchWildcardHandlin
self.queries[-1]['aggs'] = {
count_agg_group_name: {
"terms": {
"field": "{}.keyword".format(agg.groupfield)
"field": "{}".format(agg.groupfield)
},
"aggs": {
count_distinct_agg_name: {
"cardinality": {
"field": "{}.keyword".format(agg.aggfield)
"field": "{}".format(agg.aggfield)
}
},
"limit": {
@@ -304,7 +472,7 @@ class ElasticsearchDSLBackend(RulenameCommentMixin, ElasticsearchWildcardHandlin
self.queries[-1]['aggs'] = {
group_aggname: {
'terms': {
'field': '%s' % (agg.groupfield + ".keyword")
'field': '%s' % (agg.groupfield)
},
'aggs': {
'limit': {
@@ -452,7 +620,8 @@ class KibanaBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin):
if self.output_type == "import": # output format that can be imported via Kibana UI
for item in self.kibanaconf: # JSONize kibanaSavedObjectMeta.searchSourceJSON
item['_source']['kibanaSavedObjectMeta']['searchSourceJSON'] = json.dumps(item['_source']['kibanaSavedObjectMeta']['searchSourceJSON'])
return json.dumps(self.kibanaconf, indent=2)
if self.kibanaconf:
return json.dumps(self.kibanaconf, indent=2)
elif self.output_type == "curl":
for item in self.indexsearch:
return item
@@ -475,6 +644,11 @@ class KibanaBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin):
def index_variable_name(self, index):
return "index_" + index.replace("-", "__").replace("*", "X")
class KibanaCorelightBackend(CorelightQueryBackend, KibanaBackend):
    """Kibana saved-search backend restricted to Corelight/Zeek-supported logsources."""
    identifier = "corelight_kibana"
class XPackWatcherBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin):
"""Converts Sigma Rule into X-Pack Watcher JSON for alerting"""
identifier = "xpack-watcher"
@@ -563,7 +737,7 @@ class XPackWatcherBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin)
"aggs": {
"agg": {
"terms": {
"field": condition.parsedAgg.aggfield + ".keyword",
"field": condition.parsedAgg.aggfield,
"size": 10,
"order": {
"_count": order
@@ -581,7 +755,7 @@ class XPackWatcherBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin)
"aggs": {
"by": {
"terms": {
"field": condition.parsedAgg.groupfield + ".keyword",
"field": condition.parsedAgg.groupfield,
"size": 10,
"order": {
"_count": order
@@ -781,7 +955,11 @@ class XPackWatcherBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin)
raise NotImplementedError("Output type '%s' not supported" % self.output_type)
return result
class ElastalertBackend(MultiRuleOutputMixin):
class XPackWatcherCorelightBackend(CorelightQueryBackend, XPackWatcherBackend):
    """X-Pack Watcher alerting backend restricted to Corelight/Zeek-supported logsources."""
    identifier = "corelight_xpack-watcher"
class ElastalertBackend(DeepFieldMappingMixin, MultiRuleOutputMixin):
"""Elastalert backend"""
active = True
supported_alert_methods = {'email', 'http_post'}
@@ -993,6 +1171,7 @@ class ElastalertBackendQs(ElastalertBackend, ElasticsearchQuerystringBackend):
return [{ 'query' : { 'query_string' : { 'query' : super().generateQuery(parsed) } } }]
class ElasticSearchRuleBackend(ElasticsearchQuerystringBackend):
identifier = "elasticsearch-rule"
active = True
@@ -1013,16 +1192,19 @@ class ElasticSearchRuleBackend(ElasticsearchQuerystringBackend):
except (IOError, OSError) as e:
print("Failed to open {} configuration file '%s': %s".format(path, str(e)), file=sys.stderr)
return []
except json.JSONDecoder as e:
except json.JSONDecodeError as e:
print("Failed to parse {} configuration file '%s' as valid YAML: %s" % (path, str(e)), file=sys.stderr)
return []
def generate(self, sigmaparser):
translation = super().generate(sigmaparser)
if translation:
index = sigmaparser.get_logsource().index
if len(index) == 0:
index = ["apm-*-transaction", "auditbeat-*", "endgame-*", "filebeat-*", "packetbeat-*", "winlogbeat-*"]
configs = sigmaparser.parsedyaml
configs.update({"translation": translation})
rule = self.create_rule(configs)
rule = self.create_rule(configs, index)
return rule
@@ -1071,16 +1253,18 @@ class ElasticSearchRuleBackend(ElasticsearchQuerystringBackend):
elif level == "critical":
return randrange(74,101)
def create_rule(self, configs):
def create_rule(self, configs, index):
tags = configs.get("tags", [])
tactics_list = list()
technics_list = list()
new_tags = list()
for tag in tags:
tag = tag.replace("attack.", "")
if re.match("[t][0-9]{4}", tag, re.IGNORECASE):
tech = self.find_technique(tag.title())
if tech:
new_tags.append(tag.title())
technics_list.append(tech)
else:
if "_" in tag:
@@ -1088,33 +1272,33 @@ class ElasticSearchRuleBackend(ElasticsearchQuerystringBackend):
tag_list = [item.title() for item in tag_list]
tact = self.find_tactics(key_name=" ".join(tag_list))
if tact:
new_tags.append(" ".join(tag_list))
tactics_list.append(tact)
elif re.match("[ta][0-9]{4}", tag, re.IGNORECASE):
tact = self.find_tactics(key_id=tag.upper())
if tact:
new_tags.append(tag.upper())
tactics_list.append(tact)
else:
tact = self.find_tactics(key_name=tag.title())
if tact:
new_tags.append(tag.title())
tactics_list.append(tact)
threat = self.create_threat_description(tactics_list=tactics_list, techniques_list=technics_list)
rule_id = configs.get("title", "").lower().replace(" ", "_")
rule_name = configs.get("title", "").lower()
rule_id = re.sub(re.compile('[()*+!,\[\].\s"]'), "_", rule_name)
risk_score = self.map_risk_score(configs.get("level", "medium"))
references = configs.get("reference")
if references is None:
references = configs.get("references")
rule = {
"description": configs.get("description", ""),
"enabled": True,
"false_positives": configs.get('falsepositives'),
"false_positives": configs.get('falsepositives', "Unkown"),
"filters": [],
"from": "now-360s",
"immutable": False,
"index": [
"apm-*-transaction*",
"auditbeat-*",
"endgame-*",
"filebeat-*",
"packetbeat-*",
"winlogbeat-*"
],
"index": index,
"interval": "5m",
"rule_id": rule_id,
"language": "lucene",
@@ -1123,15 +1307,19 @@ class ElasticSearchRuleBackend(ElasticsearchQuerystringBackend):
"risk_score": risk_score,
"name": configs.get("title", ""),
"query":configs.get("translation"),
"references": configs.get("references"),
"meta": {
"from": "1m"
},
"severity": configs.get("level", "medium"),
"tags": tags,
"tags": new_tags,
"to": "now",
"type": "query",
"threat": threat,
"version": 1
}
return json.dumps(rule)
if references:
rule.update({"references": references})
return json.dumps(rule)
class ElasticSearchRuleCorelightBackend(CorelightQueryBackend, ElasticSearchRuleBackend):
identifier = "corelight_elasticsearch-rule"
+160
View File
@@ -0,0 +1,160 @@
# Output backends for sigmac
# Copyright 2016-2018 Thomas Patzke, Florian Roth, Roey
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
from sigma.parser.modifiers.type import SigmaRegularExpressionModifier
from sigma.parser.condition import SigmaAggregationParser
from .base import SingleTextQueryBackend
from .mixins import MultiRuleOutputMixin
class HumioBackend(SingleTextQueryBackend):
    """Converts Sigma rule into Humio query."""
    identifier = "humio"
    active = True

    # Escape double quotes and lone backslashes that are not already part
    # of a wildcard/escape sequence (negative look-behind/look-ahead keep
    # "\\", "\*" and "\?" untouched).
    reEscape = re.compile('("|(?<!\\\\)\\\\(?![*?\\\\]))')
    reClear = None
    andToken = " "
    orToken = " or "
    notToken = "!"
    subExpression = "%s"
    listExpression = "%s"
    listSeparator = " "
    valueExpression = "\"%s\""
    nullExpression = "NOT %s=\"*\""
    notNullExpression = "%s=\"*\""
    mapExpression = "%s=%s"
    # Emits Humio's regex() function: regex("field=(\"value\")").
    regexExpression = "regex(\"%s=(\\\"%s\\\")\")"
    mapListsSpecialHandling = True
    mapListValueExpression = "%s IN %s"
    typedValueExpression = {
        SigmaRegularExpressionModifier: "/%s/"
    }

    def generateMapItemNode(self, node):
        # Regular-expression values go through Humio's regex() function;
        # everything else falls back to the default key=value rendering.
        key, value = node
        if isinstance(value, SigmaRegularExpressionModifier):# or isinstance(value, str) and "*" in value :
            return self.regexExpression % (key, self.cleanValue(value))
        else:
            return super().generateMapItemNode(node)

    def generateNOTNode(self, node):
        # Negation is rendered as !( ... ) around the generated sub-query.
        generated = self.generateNode(node.item)
        if generated is not None:
            return "%s(%s)" % (self.notToken, generated)
        else:
            return None

    def generateANDNode(self, node):
        # regex() calls cannot be AND-ed inline in Humio; they are spliced
        # into the query as separate pipe stages ("... | regex(...) | ...").
        generated = [self.generateNode(val) for val in node]
        filtered = [g for g in generated if g is not None]
        if filtered:
            # NOTE(review): assumes the base class defines
            # sort_condition_lists — confirm against SingleTextQueryBackend.
            if self.sort_condition_lists:
                filtered = sorted(filtered)
            if any([item for item in filtered if "regex" in item]):
                res = ""
                for item in filtered:
                    if item.startswith("regex"):
                        if res.endswith(" | "):
                            res = res.rstrip(" | ")
                        res += " | %s | " % item.strip(" | ")
                    else:
                        res += item
                return res.strip(" | ")
            return self.andToken.join(filtered)
        else:
            return None

    def generateORNode(self, node):
        # Same pipe-splicing treatment for regex() terms as in the AND case,
        # otherwise terms are joined with " or ".
        generated = [self.generateNode(val) for val in node]
        filtered = [g.strip(" | ") for g in generated if g is not None]
        if filtered:
            if self.sort_condition_lists:
                filtered = sorted(filtered)
            if any([item for item in filtered if "regex" in item]):
                res = ""
                for item in filtered:
                    if item.startswith("regex"):
                        if res.endswith(" | "):
                            res = res.rstrip(" | ")
                        res += " | %s | " % item.strip(" | ")
                    else:
                        res += item
                return res.strip(" | ")
            return self.orToken.join(filtered)
        else:
            return None

    def cleanValue(self, val):
        # Regex modifiers carry their pattern in .value; backslashes are
        # tripled so they survive the extra quoting layers of the
        # regexExpression template.
        if isinstance(val, SigmaRegularExpressionModifier):
            val = val.value
            if "\\" in val:
                val = re.sub(r"\\", r"\\\\\\", val)
        # if (val.startswith("*") or val.endswith("*")) and "\\" in val:
        #     val = re.sub(r"\\", r"\\\\\\", val)
        return super().cleanValue(val)

    def generateMapItemListNode(self, key, value):
        # NOTE(review): the regex branch returns a dict while all other
        # branches return query strings — looks inconsistent for a text
        # backend; confirm how callers consume this.
        if isinstance(value, SigmaRegularExpressionModifier):
            key_mapped = self.fieldNameMapping(key, value)
            return {'regexp': {key_mapped: str(value)}}
        # if any([item for item in value if "*" in item]):
        #     return (" | " + " | ".join([self.regexExpression % (key, self.cleanValue(item)) for item in value]) + " | ")
        if not set([type(val) for val in value]).issubset({str, int}):
            raise TypeError("List values must be strings or numbers")
        return (" or ".join(['%s=%s' % (key, self.generateValueNode(item)) for item in value]))

    def generateAggregation(self, agg):
        # Renders Sigma aggregations as Humio pipe stages
        # (count()/groupby()), comparing the result against the condition.
        if agg == None:
            return ""
        if agg.aggfunc == SigmaAggregationParser.AGGFUNC_NEAR:
            raise NotImplementedError("The 'near' aggregation operator is not yet implemented for this backend")
        if agg.groupfield == None:
            if agg.aggfunc_notrans == 'count':
                if agg.aggfield == None :
                    return " | val := count() | val %s %s" % (agg.cond_op, agg.condition)
                else:
                    # Count of a specific field is treated as a distinct
                    # count; NOTE: this mutates agg in place.
                    agg.aggfunc_notrans = 'dc'
            # Non-plain-count aggregations fall through to a distinct count.
            return " | count(field=%s, distinct=true, as=val) | val %s %s" % (agg.aggfield or "", agg.cond_op, agg.condition)
        else:
            if agg.aggfunc_notrans == 'count':
                if agg.aggfield == None :
                    return " | val := count(field=%s) | val %s %s" % (agg.groupfield or "", agg.cond_op, agg.condition)
                else:
                    agg.aggfunc_notrans = 'dc'
            return " | groupby(field=%s, function=count(field=%s, distinct=true, as=val)) | val %s %s" % (agg.groupfield or "", agg.aggfield or "", agg.cond_op, agg.condition)

    def generate(self, sigmaparser):
        """Method is called for each sigma rule and receives the parsed rule (SigmaParser)"""
        # NOTE(review): the return statement sits inside the loop, so only
        # the first parsed condition is ever emitted — confirm intended.
        for parsed in sigmaparser.condparsed:
            query = self.generateQuery(parsed)
            #before = self.generateBefore(parsed)
            #after = self.generateAfter(parsed)

            result = ""
            # if before is not None:
            #     result = before
            if query is not None:
                result += query
            # if after is not None:
            #     result += after
            if result.endswith(" | "):
                result = result.strip(" | ")
            return result
+609 -604
View File
@@ -1,604 +1,609 @@
# LimaCharlie backend for sigmac created by LimaCharlie.io
# Copyright 2019 Refraction Point, Inc
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import yaml
from collections import namedtuple
from .base import BaseBackend
from sigma.parser.modifiers.base import SigmaTypeModifier
from sigma.parser.modifiers.type import SigmaRegularExpressionModifier
# A few helper functions for cases where field mapping cannot be done
# as easily one by one, or can be done more efficiently.
def _windowsEventLogFieldName(fieldName):
    """Return the LimaCharlie event path for a Windows Event Log field.

    ``EventID`` lives under ``Event/System``; every other field is found
    under ``Event/EventData``.
    """
    isSystemField = fieldName == 'EventID'
    return 'Event/System/EventID' if isSystemField else 'Event/EventData/%s' % (fieldName,)
def _mapProcessCreationOperations(node):
    """Fix common rule pitfalls in an already-generated D&R operation.

    A "starts with" match on ``event/FILE_PATH`` that is anchored to a
    hard-coded ``C:\\`` prefix is rewritten into a "matches" regex that
    also accepts any other drive letter as well as the early-boot
    ``\\Device\\HarddiskVolumeN\\`` form of the path.
    """
    isDriveLetterPrefix = (
        "starts with" == node["op"]
        and "event/FILE_PATH" == node["path"]
        and node["value"].lower().startswith("c:\\")
    )
    if isDriveLetterPrefix:
        pathTail = re.escape(node["value"][3:])
        node["re"] = "^(?:(?:.:)|(?:\\\\Device\\\\HarddiskVolume.))\\\\%s" % (pathTail,)
        node["op"] = "matches"
        del node["value"]
    return node
# We support many different log sources so we keep different mapping depending
# on the log source and category.
# The mapping key is product/category/service.
# The mapping value is tuple like:
# - top-level parameters
# - pre-condition is a D&R rule node filtering relevant events.
# - field mappings is a dict with a mapping or a callable to convert the field name.
# Individual mapping values can also be callabled(fieldname, value) returning a new fieldname and value.
# - isAllStringValues is a bool indicating whether all values should be converted to string.
# - keywordField is the field name to alias for keywords if supported or None if not.
# - postOpMapper is a callback that can modify an operation once it has been generated.
# Per-log-source conversion settings: top-level rule parameters, an
# optional pre-condition node, the field-name mappings (dict or callable),
# whether values are forced to strings, the field aliased for keyword
# searches (or None), and an optional post-generation fixup callback.
SigmaLCConfig = namedtuple(
    'SigmaLCConfig',
    'topLevelParams preConditions fieldMappings '
    'isAllStringValues keywordField postOpMapper',
)
# Mapping from "product/category/service" keys to the SigmaLCConfig that
# drives conversion for that log source; a missing key means the source is
# not supported by this backend.
_allFieldMappings = {
    "windows/process_creation/": SigmaLCConfig(
        topLevelParams = {
            "events": [
                "NEW_PROCESS",
                "EXISTING_PROCESS",
            ]
        },
        preConditions = {
            "op": "is windows",
        },
        fieldMappings = {
            "CommandLine": "event/COMMAND_LINE",
            "Image": "event/FILE_PATH",
            "ParentImage": "event/PARENT/FILE_PATH",
            "ParentCommandLine": "event/PARENT/COMMAND_LINE",
            "User": "event/USER_NAME",
            "OriginalFileName": "event/ORIGINAL_FILE_NAME",
            # Custom field names coming from somewhere unknown.
            "NewProcessName": "event/FILE_PATH",
            "ProcessCommandLine": "event/COMMAND_LINE",
            # Another one-off command line.
            "Command": "event/COMMAND_LINE",
        },
        isAllStringValues = False,
        keywordField = "event/COMMAND_LINE",
        postOpMapper = _mapProcessCreationOperations
    ),
    # Generic Windows Event Log: field names are derived by callable.
    "windows//": SigmaLCConfig(
        topLevelParams = {
            "target": "log",
            "log type": "wel",
        },
        preConditions = None,
        fieldMappings = _windowsEventLogFieldName,
        isAllStringValues = True,
        keywordField = None,
        postOpMapper = None
    ),
    "windows_defender//": SigmaLCConfig(
        topLevelParams = {
            "target": "log",
            "log type": "wel",
        },
        preConditions = None,
        fieldMappings = _windowsEventLogFieldName,
        isAllStringValues = True,
        keywordField = None,
        postOpMapper = None
    ),
    "dns//": SigmaLCConfig(
        topLevelParams = {
            "event": "DNS_REQUEST",
        },
        preConditions = None,
        fieldMappings = {
            "query": "event/DOMAIN_NAME",
        },
        isAllStringValues = False,
        keywordField = None,
        postOpMapper = None
    ),
    "linux//": SigmaLCConfig(
        topLevelParams = {
            "events": [
                "NEW_PROCESS",
                "EXISTING_PROCESS",
            ]
        },
        preConditions = {
            "op": "is linux",
        },
        fieldMappings = {
            "exe": "event/FILE_PATH",
            # None means the field is deliberately ignored.
            "type": None,
        },
        isAllStringValues = False,
        keywordField = 'event/COMMAND_LINE',
        postOpMapper = None
    ),
    "unix//": SigmaLCConfig(
        topLevelParams = {
            "events": [
                "NEW_PROCESS",
                "EXISTING_PROCESS",
            ]
        },
        preConditions = {
            "op": "is linux",
        },
        fieldMappings = {
            "exe": "event/FILE_PATH",
            "type": None,
        },
        isAllStringValues = False,
        keywordField = 'event/COMMAND_LINE',
        postOpMapper = None
    ),
    "netflow//": SigmaLCConfig(
        topLevelParams = {
            "event": "NETWORK_CONNECTIONS",
        },
        preConditions = None,
        fieldMappings = {
            "destination.port": "event/NETWORK_ACTIVITY/DESTINATION/PORT",
            "source.port": "event/NETWORK_ACTIVITY/SOURCE/PORT",
        },
        isAllStringValues = False,
        keywordField = None,
        postOpMapper = None
    ),
    "/proxy/": SigmaLCConfig(
        topLevelParams = {
            "event": "HTTP_REQUEST",
        },
        preConditions = None,
        fieldMappings = {
            "c-uri|contains": "event/URL",
            "c-uri": "event/URL",
            "URL": "event/URL",
            "cs-uri-query": "event/URL",
            "cs-uri-stem": "event/URL",
        },
        isAllStringValues = False,
        keywordField = None,
        postOpMapper = None
    ),
}
class LimaCharlieBackend(BaseBackend):
    """Converts Sigma rule into LimaCharlie D&R rules. Contributed by LimaCharlie. https://limacharlie.io"""
    identifier = "limacharlie"
    active = True
    config_required = False
    default_config = ["limacharlie"]

    def generate(self, sigmaparser):
        """Generate a complete LimaCharlie D&R rule (YAML string) for one Sigma rule."""
        # Take the log source information and figure out which set of mappings to use.
        ruleConfig = sigmaparser.parsedyaml
        ls_rule = ruleConfig['logsource']
        try:
            category = ls_rule['category']
        except KeyError:
            category = ""
        try:
            product = ls_rule['product']
        except KeyError:
            product = ""
        # try:
        #     service = ls_rule['service']
        # except KeyError:
        #     service = ""

        # Don't use service for now, most Windows Event Logs
        # uses a different service with no category, since we
        # treat all Windows Event Logs together we can ignore
        # the service.
        service = ""

        # See if we have a definition for the source combination.
        mappingKey = "%s/%s/%s" % (product, category, service)
        topFilter, preCond, mappings, isAllStringValues, keywordField, postOpMapper = _allFieldMappings.get(mappingKey, tuple([None, None, None, None, None, None]))
        if mappings is None:
            raise NotImplementedError("Log source %s/%s/%s not supported by backend." % (product, category, service))

        # Field name conversions.
        self._fieldMappingInEffect = mappings

        # LC event type pre-selector for the type of data.
        self._preCondition = preCond

        # Are all the values treated as strings?
        self._isAllStringValues = isAllStringValues

        # Are we supporting keywords full text search?
        self._keywordField = keywordField

        # Call to fixup all operations after the fact.
        self._postOpMapper = postOpMapper

        # Call the original generation code.
        detectComponent = super().generate(sigmaparser)

        # We expect a string (yaml) as output, so if
        # we get anything else we assume it's a core
        # library value and just return it as-is.
        if not isinstance( detectComponent, str):
            return detectComponent

        # This is redundant (deserializing right after generating the
        # yaml), but we try to use the parent official class code as much
        # as possible for future compatibility.
        detectComponent = yaml.safe_load(detectComponent)

        # Check that we got a proper node and not just a string
        # which we don't really know what to do with.
        if not isinstance(detectComponent, dict):
            raise NotImplementedError("Selection combination not supported.")

        # Apply top level filter.
        detectComponent.update(topFilter)

        # Now prepare the Response component.
        respondComponents = [{
            "action": "report",
            "name": ruleConfig["title"],
        }]

        # Add a lot of the metadata available to the report.
        if ruleConfig.get("tags", None) is not None:
            respondComponents[0].setdefault("metadata", {})["tags"] = ruleConfig["tags"]

        if ruleConfig.get("description", None) is not None:
            respondComponents[0].setdefault("metadata", {})["description"] = ruleConfig["description"]

        if ruleConfig.get("references", None) is not None:
            respondComponents[0].setdefault("metadata", {})["references"] = ruleConfig["references"]

        if ruleConfig.get("level", None) is not None:
            respondComponents[0].setdefault("metadata", {})["level"] = ruleConfig["level"]

        if ruleConfig.get("author", None) is not None:
            respondComponents[0].setdefault("metadata", {})["author"] = ruleConfig["author"]

        if ruleConfig.get("falsepositives", None) is not None:
            respondComponents[0].setdefault("metadata", {})["falsepositives"] = ruleConfig["falsepositives"]

        # Assemble it all as a single, complete D&R rule.
        return yaml.safe_dump({
            "detect": detectComponent,
            "respond": respondComponents,
        }, default_flow_style = False)

    def generateQuery(self, parsed):
        # We override the generateQuery function because
        # we generate proper JSON structures internally
        # and only convert to string (yaml) once the
        # whole thing is assembled.
        result = self.generateNode(parsed.parsedSearch)
        if self._preCondition is not None:
            result = {
                "op": "and",
                "rules": [
                    self._preCondition,
                    result,
                ]
            }
            if self._postOpMapper is not None:
                result = self._postOpMapper(result)
        return yaml.safe_dump(result)

    def generateANDNode(self, node):
        """AND together child nodes as a D&R "and" rule (dict)."""
        generated = [ self.generateNode(val) for val in node ]
        filtered = [ g for g in generated if g is not None ]
        if not filtered:
            return None

        # Map any possible keywords.
        filtered = self._mapKeywordVals(filtered)

        # A single child collapses to itself rather than a one-rule "and".
        if 1 == len(filtered):
            if self._postOpMapper is not None:
                filtered[0] = self._postOpMapper(filtered[0])
            return filtered[0]
        result = {
            "op": "and",
            "rules": filtered,
        }
        if self._postOpMapper is not None:
            result = self._postOpMapper(result)
        return result

    def generateORNode(self, node):
        """OR together child nodes as a D&R "or" rule (dict)."""
        generated = [self.generateNode(val) for val in node]
        filtered = [g for g in generated if g is not None]
        if not filtered:
            return None

        # Map any possible keywords.
        filtered = self._mapKeywordVals(filtered)

        if 1 == len(filtered):
            if self._postOpMapper is not None:
                filtered[0] = self._postOpMapper(filtered[0])
            return filtered[0]
        result = {
            "op": "or",
            "rules": filtered,
        }
        if self._postOpMapper is not None:
            result = self._postOpMapper(result)
        return result

    def generateNOTNode(self, node):
        # Negation toggles the "not" flag on the generated operation
        # rather than wrapping it.
        generated = self.generateNode(node.item)
        if generated is None:
            return None
        if not isinstance(generated, dict):
            raise NotImplementedError("Not operator not available on non-dict nodes.")
        generated["not"] = not generated.get("not", False)
        return generated

    def generateSubexpressionNode(self, node):
        return self.generateNode(node.items)

    def generateListNode(self, node):
        return [self.generateNode(value) for value in node]

    def generateMapItemNode(self, node):
        """Convert one field/value pair into a D&R operation dict (or None to drop it)."""
        fieldname, value = node

        fieldNameAndValCallback = None

        # The mapping can be a dictionary of mapping or a callable
        # to get the correct value.
        if callable(self._fieldMappingInEffect):
            fieldname = self._fieldMappingInEffect(fieldname)
        else:
            try:
                # The mapping can also be a callable that will
                # return a mapped key AND value.
                if callable(self._fieldMappingInEffect[fieldname]):
                    fieldNameAndValCallback = self._fieldMappingInEffect[fieldname]
                else:
                    fieldname = self._fieldMappingInEffect[fieldname]
            except:
                raise NotImplementedError("Field name %s not supported by backend." % (fieldname,))

        # If fieldname returned is None, it's a special case where we
        # ignore the node.
        if fieldname is None:
            return None

        if isinstance(value, (int, str)):
            # Scalar value: one operation, wildcard-aware.
            if fieldNameAndValCallback is not None:
                fieldname, value = fieldNameAndValCallback(fieldname, value)
            op, newVal = self._valuePatternToLcOp(value)
            newOp = {
                "op": op,
                "path": fieldname,
                "case sensitive": False,
            }
            if op == "matches":
                newOp["re"] = newVal
            else:
                newOp["value"] = newVal
            if self._postOpMapper is not None:
                newOp = self._postOpMapper(newOp)
            return newOp
        elif isinstance(value, list):
            # List of alternatives: OR of per-value operations.
            subOps = []
            for v in value:
                if fieldNameAndValCallback is not None:
                    fieldname, v = fieldNameAndValCallback(fieldname, v)
                op, newVal = self._valuePatternToLcOp(v)
                newOp = {
                    "op": op,
                    "path": fieldname,
                    "case sensitive": False,
                }
                if op == "matches":
                    newOp["re"] = newVal
                else:
                    newOp["value"] = newVal
                if self._postOpMapper is not None:
                    newOp = self._postOpMapper(newOp)
                subOps.append(newOp)
            if 1 == len(subOps):
                return subOps[0]
            return {
                "op": "or",
                "rules": subOps
            }
        elif isinstance(value, SigmaTypeModifier):
            # Only the regex type modifier is supported.
            if isinstance(value, SigmaRegularExpressionModifier):
                if fieldNameAndValCallback is not None:
                    fieldname, value = fieldNameAndValCallback(fieldname, value)
                result = {
                    "op": "matches",
                    "path": fieldname,
                    # NOTE(review): re.compile() is handed the modifier
                    # object itself, not its pattern string — confirm the
                    # modifier is accepted by re.compile.
                    "re": re.compile(value),
                }
                if self._postOpMapper is not None:
                    result = self._postOpMapper(result)
                return result
            else:
                raise TypeError("Backend does not support TypeModifier: %s" % (str(type(value))))
        elif value is None:
            # Null value means the field must be absent.
            if fieldNameAndValCallback is not None:
                fieldname, value = fieldNameAndValCallback(fieldname, value)
            result = {
                "op": "exists",
                "not": True,
                "path": fieldname,
            }
            if self._postOpMapper is not None:
                result = self._postOpMapper(result)
            return result
        else:
            raise TypeError("Backend does not support map values of type " + str(type(value)))

    def generateValueNode(self, node):
        return node

    def _valuePatternToLcOp(self, val):
        """Translate a Sigma value (possibly with * and ? wildcards) into a (operator, value) pair."""
        # Here we convert the string values supported by Sigma that
        # can include wildcards into either proper values (string or int)
        # or into altered values to be functionally equivalent using
        # a few different LC D&R rule operators.

        # No point evaluating non-strings.
        if not isinstance(val, str):
            return ("is", str(val) if self._isAllStringValues else val)
        # Is there any wildcard in this string? If not, we can short circuit.
        if "*" not in val and "?" not in val:
            return ("is", val)

        # Now we do a small optimization for the shortcut operators
        # available in LC. We try to see if the wildcards are around
        # the main value, but NOT within. If that's the case we can
        # use the "starts with", "ends with" or "contains" operators.
        isStartsWithWildcard = False
        isEndsWithWildcard = False
        tmpVal = val
        if tmpVal.startswith("*"):
            isStartsWithWildcard = True
            tmpVal = tmpVal[1:]
        if tmpVal.endswith("*") and not (tmpVal.endswith("\\*") and not tmpVal.endswith("\\\\*")):
            isEndsWithWildcard = True
            if tmpVal.endswith("\\\\*"):
                # An extra \ had to be there so it didn't escape the
                # *, but since we plan on removing the *, we can also
                # remove one \.
                tmpVal = tmpVal[:-2]
            else:
                tmpVal = tmpVal[:-1]

        # Check to see if there are any other wildcards. If there are
        # we cannot use our shortcuts.
        if "*" not in tmpVal and "?" not in tmpVal:
            if isStartsWithWildcard and isEndsWithWildcard:
                return ("contains", tmpVal)

            if isStartsWithWildcard:
                return ("ends with", tmpVal)

            if isEndsWithWildcard:
                return ("starts with", tmpVal)

        # This is messy, but it is accurate in generating a RE based on
        # the simplified wildcard system, while also supporting the
        # escaping of those wildcards.
        segments = []
        tmpVal = val
        while True:
            nEscapes = 0
            for i in range(len(tmpVal)):
                # We keep a running count of backslash escape
                # characters we see so that if we meet a wildcard
                # we can tell whether the wildcard is escaped
                # (with odd number of escapes) or if it's just a
                # backslash literal before a wildcard (even number).
                if "\\" == tmpVal[i]:
                    nEscapes += 1
                    continue

                if "*" == tmpVal[i]:
                    if 0 == nEscapes:
                        segments.append(re.escape(tmpVal[:i]))
                        segments.append(".*")
                    elif nEscapes % 2 == 0:
                        segments.append(re.escape(tmpVal[:i - nEscapes]))
                        segments.append(tmpVal[i - nEscapes:i])
                        segments.append(".*")
                    else:
                        # Odd escapes: the wildcard is literal.
                        segments.append(re.escape(tmpVal[:i - nEscapes]))
                        segments.append(tmpVal[i - nEscapes:i + 1])
                    tmpVal = tmpVal[i + 1:]
                    break

                if "?" == tmpVal[i]:
                    if 0 == nEscapes:
                        segments.append(re.escape(tmpVal[:i]))
                        segments.append(".")
                    elif nEscapes % 2 == 0:
                        segments.append(re.escape(tmpVal[:i - nEscapes]))
                        segments.append(tmpVal[i - nEscapes:i])
                        segments.append(".")
                    else:
                        segments.append(re.escape(tmpVal[:i - nEscapes]))
                        segments.append(tmpVal[i - nEscapes:i + 1])
                    tmpVal = tmpVal[i + 1:]
                    break

                nEscapes = 0
            else:
                # No wildcard left: the remainder is a literal tail.
                segments.append(re.escape(tmpVal))
                break
        val = ''.join(segments)

        return ("matches", val)

    def _mapKeywordVals(self, values):
        # This function ensures that the list of values passed
        # are proper D&R operations, if they are strings it indicates
        # they were requested as keyword matches. We only support
        # keyword matches when specified in the config. We generally just
        # map them to the most common field in LC that makes sense.
        mapped = []

        for val in values:
            # Non-keywords are just passed through.
            if not isinstance(val, str):
                mapped.append(val)
                continue

            # NOTE: "keyboard" in the message below looks like a typo for
            # "keyword" — left as-is since it is a runtime string.
            if self._keywordField is None:
                raise NotImplementedError("Full-text keyboard searches not supported.")

            # This seems to be indicative only of "keywords" which are mostly
            # representative of full-text searches. We don't support that but
            # in some data sources we can alias them to an actual field.
            op, newVal = self._valuePatternToLcOp(val)
            newOp = {
                "op": op,
                "path": self._keywordField,
            }
            if op == "matches":
                newOp["re"] = newVal
            else:
                newOp["value"] = newVal
            mapped.append(newOp)

        return mapped
# LimaCharlie backend for sigmac created by LimaCharlie.io
# Copyright 2019 Refraction Point, Inc
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import yaml
from collections import namedtuple
from .base import BaseBackend
from sigma.parser.modifiers.base import SigmaTypeModifier
from sigma.parser.modifiers.type import SigmaRegularExpressionModifier
# A few helper functions for cases where field mapping cannot be done
# as easily one by one, or can be done more efficiently.
def _windowsEventLogFieldName(fieldName):
    """Return the LimaCharlie event path for a Windows Event Log field.

    The ``EventID`` field is special-cased under ``Event/System``; all
    other fields resolve under ``Event/EventData``.
    """
    if fieldName == 'EventID':
        return 'Event/System/EventID'
    return 'Event/EventData/%s' % (fieldName,)
def _mapProcessCreationOperations(node):
    """Fix common rule pitfalls in an already-generated D&R operation.

    Rewrites a hard-coded ``C:\\``-anchored "starts with" on
    ``event/FILE_PATH`` into a "matches" regex accepting any drive letter
    or the early-boot ``\\Device\\HarddiskVolumeN\\`` prefix.
    """
    if (node["op"] == "starts with"
            and node["path"] == "event/FILE_PATH"
            and node["value"].lower().startswith("c:\\")):
        escapedTail = re.escape(node["value"][3:])
        node["op"] = "matches"
        node["re"] = "^(?:(?:.:)|(?:\\\\Device\\\\HarddiskVolume.))\\\\%s" % (escapedTail,)
        node.pop("value")
    return node
# We support many different log sources so we keep different mapping depending
# on the log source and category.
# The mapping key is product/category/service.
# The mapping value is tuple like:
# - top-level parameters
# - pre-condition is a D&R rule node filtering relevant events.
# - field mappings is a dict with a mapping or a callable to convert the field name.
# Individual mapping values can also be callabled(fieldname, value) returning a new fieldname and value.
# - isAllStringValues is a bool indicating whether all values should be converted to string.
# - keywordField is the field name to alias for keywords if supported or None if not.
# - postOpMapper is a callback that can modify an operation once it has been generated.
# Per-log-source conversion settings bundle (see comment block above the
# mapping table for the meaning of each field).
SigmaLCConfig = namedtuple('SigmaLCConfig', (
    'topLevelParams',
    'preConditions',
    'fieldMappings',
    'isAllStringValues',
    'keywordField',
    'postOpMapper',
))
# Mapping from "product/category/service" keys to the SigmaLCConfig that
# drives conversion for that log source; a missing key means the source is
# not supported by this backend.
_allFieldMappings = {
    "windows/process_creation/": SigmaLCConfig(
        topLevelParams = {
            "events": [
                "NEW_PROCESS",
                "EXISTING_PROCESS",
            ]
        },
        preConditions = {
            "op": "is windows",
        },
        fieldMappings = {
            "CommandLine": "event/COMMAND_LINE",
            "Image": "event/FILE_PATH",
            "ParentImage": "event/PARENT/FILE_PATH",
            "ParentCommandLine": "event/PARENT/COMMAND_LINE",
            "User": "event/USER_NAME",
            "OriginalFileName": "event/ORIGINAL_FILE_NAME",
            # Custom field names coming from somewhere unknown.
            "NewProcessName": "event/FILE_PATH",
            "ProcessCommandLine": "event/COMMAND_LINE",
            # Another one-off command line.
            "Command": "event/COMMAND_LINE",
        },
        isAllStringValues = False,
        keywordField = "event/COMMAND_LINE",
        postOpMapper = _mapProcessCreationOperations
    ),
    # Generic Windows Event Log: field names are derived by callable.
    "windows//": SigmaLCConfig(
        topLevelParams = {
            "target": "log",
            "log type": "wel",
        },
        preConditions = None,
        fieldMappings = _windowsEventLogFieldName,
        isAllStringValues = True,
        keywordField = None,
        postOpMapper = None
    ),
    "windows_defender//": SigmaLCConfig(
        topLevelParams = {
            "target": "log",
            "log type": "wel",
        },
        preConditions = None,
        fieldMappings = _windowsEventLogFieldName,
        isAllStringValues = True,
        keywordField = None,
        postOpMapper = None
    ),
    "dns//": SigmaLCConfig(
        topLevelParams = {
            "event": "DNS_REQUEST",
        },
        preConditions = None,
        fieldMappings = {
            "query": "event/DOMAIN_NAME",
        },
        isAllStringValues = False,
        keywordField = None,
        postOpMapper = None
    ),
    "linux//": SigmaLCConfig(
        topLevelParams = {
            "events": [
                "NEW_PROCESS",
                "EXISTING_PROCESS",
            ]
        },
        preConditions = {
            "op": "is linux",
        },
        fieldMappings = {
            "exe": "event/FILE_PATH",
            # None means the field is deliberately ignored.
            "type": None,
        },
        isAllStringValues = False,
        keywordField = 'event/COMMAND_LINE',
        postOpMapper = None
    ),
    "unix//": SigmaLCConfig(
        topLevelParams = {
            "events": [
                "NEW_PROCESS",
                "EXISTING_PROCESS",
            ]
        },
        preConditions = {
            "op": "is linux",
        },
        fieldMappings = {
            "exe": "event/FILE_PATH",
            "type": None,
        },
        isAllStringValues = False,
        keywordField = 'event/COMMAND_LINE',
        postOpMapper = None
    ),
    "netflow//": SigmaLCConfig(
        topLevelParams = {
            "event": "NETWORK_CONNECTIONS",
        },
        preConditions = None,
        fieldMappings = {
            "destination.port": "event/NETWORK_ACTIVITY/DESTINATION/PORT",
            "source.port": "event/NETWORK_ACTIVITY/SOURCE/PORT",
        },
        isAllStringValues = False,
        keywordField = None,
        postOpMapper = None
    ),
    "/proxy/": SigmaLCConfig(
        topLevelParams = {
            "event": "HTTP_REQUEST",
        },
        preConditions = None,
        fieldMappings = {
            "c-uri|contains": "event/URL",
            "c-uri": "event/URL",
            "URL": "event/URL",
            "cs-uri-query": "event/URL",
            "cs-uri-stem": "event/URL",
        },
        isAllStringValues = False,
        keywordField = None,
        postOpMapper = None
    ),
}
class LimaCharlieBackend(BaseBackend):
"""Converts Sigma rule into LimaCharlie D&R rules. Contributed by LimaCharlie. https://limacharlie.io"""
identifier = "limacharlie"
active = True
config_required = False
default_config = ["limacharlie"]
def generate(self, sigmaparser):
# Take the log source information and figure out which set of mappings to use.
ruleConfig = sigmaparser.parsedyaml
ls_rule = ruleConfig['logsource']
try:
category = ls_rule['category']
except KeyError:
category = ""
try:
product = ls_rule['product']
except KeyError:
product = ""
# try:
# service = ls_rule['service']
# except KeyError:
# service = ""
# If there is a timeframe component, we do not currently
# support it for now.
if ruleConfig.get( 'detection', {} ).get( 'timeframe', None ) is not None:
raise NotImplementedError("Timeframes are not supported by backend.")
# Don't use service for now, most Windows Event Logs
# uses a different service with no category, since we
# treat all Windows Event Logs together we can ignore
# the service.
service = ""
# See if we have a definition for the source combination.
mappingKey = "%s/%s/%s" % (product, category, service)
topFilter, preCond, mappings, isAllStringValues, keywordField, postOpMapper = _allFieldMappings.get(mappingKey, tuple([None, None, None, None, None, None]))
if mappings is None:
raise NotImplementedError("Log source %s/%s/%s not supported by backend." % (product, category, service))
# Field name conversions.
self._fieldMappingInEffect = mappings
# LC event type pre-selector for the type of data.
self._preCondition = preCond
# Are all the values treated as strings?
self._isAllStringValues = isAllStringValues
# Are we supporting keywords full text search?
self._keywordField = keywordField
# Call to fixup all operations after the fact.
self._postOpMapper = postOpMapper
# Call the original generation code.
detectComponent = super().generate(sigmaparser)
# We expect a string (yaml) as output, so if
# we get anything else we assume it's a core
# library value and just return it as-is.
if not isinstance( detectComponent, str):
return detectComponent
# This redundant to deserialize it right after
# generating the yaml, but we try to use the parent
# official class code as much as possible for future
# compatibility.
detectComponent = yaml.safe_load(detectComponent)
# Check that we got a proper node and not just a string
# which we don't really know what to do with.
if not isinstance(detectComponent, dict):
raise NotImplementedError("Selection combination not supported.")
# Apply top level filter.
detectComponent.update(topFilter)
# Now prepare the Response component.
respondComponents = [{
"action": "report",
"name": ruleConfig["title"],
}]
# Add a lot of the metadata available to the report.
if ruleConfig.get("tags", None) is not None:
respondComponents[0].setdefault("metadata", {})["tags"] = ruleConfig["tags"]
if ruleConfig.get("description", None) is not None:
respondComponents[0].setdefault("metadata", {})["description"] = ruleConfig["description"]
if ruleConfig.get("references", None) is not None:
respondComponents[0].setdefault("metadata", {})["references"] = ruleConfig["references"]
if ruleConfig.get("level", None) is not None:
respondComponents[0].setdefault("metadata", {})["level"] = ruleConfig["level"]
if ruleConfig.get("author", None) is not None:
respondComponents[0].setdefault("metadata", {})["author"] = ruleConfig["author"]
if ruleConfig.get("falsepositives", None) is not None:
respondComponents[0].setdefault("metadata", {})["falsepositives"] = ruleConfig["falsepositives"]
# Assemble it all as a single, complete D&R rule.
return yaml.safe_dump({
"detect": detectComponent,
"respond": respondComponents,
}, default_flow_style = False)
def generateQuery(self, parsed):
    """Generate the detection logic for one parsed Sigma search.

    Unlike the parent backend we keep the result as native Python
    structures (dicts/lists) internally and only serialize to string
    (yaml) once the whole tree has been assembled.
    """
    detection = self.generateNode(parsed.parsedSearch)
    # Wrap the detection with the configured pre-condition, if any.
    if self._preCondition is not None:
        detection = {
            "op": "and",
            "rules": [self._preCondition, detection],
        }
    # Give the optional post-processing hook a chance to rewrite
    # the final operation.
    if self._postOpMapper is not None:
        detection = self._postOpMapper(detection)
    return yaml.safe_dump(detection)
def generateANDNode(self, node):
    """Generate a D&R "and" operation from a Sigma AND node."""
    rules = [r for r in (self.generateNode(child) for child in node) if r is not None]
    if not rules:
        return None
    # Resolve any bare keyword strings into real operations.
    rules = self._mapKeywordVals(rules)
    if len(rules) == 1:
        # A single child needs no boolean wrapper around it.
        only = rules[0]
        if self._postOpMapper is not None:
            only = self._postOpMapper(only)
        return only
    combined = {
        "op": "and",
        "rules": rules,
    }
    if self._postOpMapper is not None:
        combined = self._postOpMapper(combined)
    return combined
def generateORNode(self, node):
    """Generate a D&R "or" operation from a Sigma OR node."""
    rules = [r for r in (self.generateNode(child) for child in node) if r is not None]
    if not rules:
        return None
    # Resolve any bare keyword strings into real operations.
    rules = self._mapKeywordVals(rules)
    if len(rules) == 1:
        # A single child needs no boolean wrapper around it.
        only = rules[0]
        if self._postOpMapper is not None:
            only = self._postOpMapper(only)
        return only
    combined = {
        "op": "or",
        "rules": rules,
    }
    if self._postOpMapper is not None:
        combined = self._postOpMapper(combined)
    return combined
def generateNOTNode(self, node):
    """Negate the generated child operation via its "not" flag."""
    child = self.generateNode(node.item)
    if child is None:
        return None
    if not isinstance(child, dict):
        raise NotImplementedError("Not operator not available on non-dict nodes.")
    # Toggle rather than set, so a double negation cancels out.
    child["not"] = not child.get("not", False)
    return child
def generateSubexpressionNode(self, node):
    """A sub-expression carries no semantics of its own; just
    generate whatever it contains."""
    inner = self.generateNode(node.items)
    return inner
def generateListNode(self, node):
    """Generate each element of a Sigma value list individually."""
    return list(map(self.generateNode, node))
def generateMapItemNode(self, node):
    """Generate a D&R operation for a single field/value Sigma entry.

    Supports scalar values, lists of values (OR-ed together), regular
    expression modifiers, and null values (translated to a negated
    "exists" check). The field name is translated through the mapping
    in effect, which may be a callable, or a dictionary whose values
    are either replacement names or callables returning a mapped
    (name, value) pair.

    Raises NotImplementedError for unmapped fields and TypeError for
    unsupported value types.
    """
    fieldname, value = node
    fieldNameAndValCallback = None
    # The mapping can be a dictionary of mappings or a callable
    # used to resolve the correct field name.
    if callable(self._fieldMappingInEffect):
        fieldname = self._fieldMappingInEffect(fieldname)
    else:
        try:
            if callable(self._fieldMappingInEffect[fieldname]):
                # The mapping can also be a callable that will
                # return a mapped key AND value.
                fieldNameAndValCallback = self._fieldMappingInEffect[fieldname]
            else:
                fieldname = self._fieldMappingInEffect[fieldname]
        except KeyError:
            # Only a missing mapping means the field is unsupported;
            # any other error should propagate untouched.
            raise NotImplementedError("Field name %s not supported by backend." % (fieldname,))
    # If fieldname returned is None, it's a special case where we
    # ignore the node.
    if fieldname is None:
        return None

    def _makeOp(path, rawValue):
        # Build one case-insensitive D&R operation for a single value,
        # storing the pattern under "re" or "value" as appropriate.
        op, newVal = self._valuePatternToLcOp(rawValue)
        newOp = {
            "op": op,
            "path": path,
            "case sensitive": False,
        }
        if op == "matches":
            newOp["re"] = newVal
        else:
            newOp["value"] = newVal
        if self._postOpMapper is not None:
            newOp = self._postOpMapper(newOp)
        return newOp

    if isinstance(value, (int, str)):
        if fieldNameAndValCallback is not None:
            fieldname, value = fieldNameAndValCallback(fieldname, value)
        return _makeOp(fieldname, value)
    elif isinstance(value, list):
        subOps = []
        for v in value:
            if fieldNameAndValCallback is not None:
                fieldname, v = fieldNameAndValCallback(fieldname, v)
            subOps.append(_makeOp(fieldname, v))
        if 1 == len(subOps):
            return subOps[0]
        # Multiple values for one field are OR-ed together.
        return {
            "op": "or",
            "rules": subOps
        }
    elif isinstance(value, SigmaTypeModifier):
        if isinstance(value, SigmaRegularExpressionModifier):
            if fieldNameAndValCallback is not None:
                fieldname, value = fieldNameAndValCallback(fieldname, value)
            result = {
                "op": "matches",
                "path": fieldname,
                "re": re.compile(value),
            }
            if self._postOpMapper is not None:
                result = self._postOpMapper(result)
            return result
        else:
            raise TypeError("Backend does not support TypeModifier: %s" % (str(type(value))))
    elif value is None:
        if fieldNameAndValCallback is not None:
            fieldname, value = fieldNameAndValCallback(fieldname, value)
        # A null value means the field must not exist at all.
        result = {
            "op": "exists",
            "not": True,
            "path": fieldname,
        }
        if self._postOpMapper is not None:
            result = self._postOpMapper(result)
        return result
    else:
        raise TypeError("Backend does not support map values of type " + str(type(value)))
def generateValueNode(self, node):
    # Values are passed through untouched here; conversion to D&R
    # operators happens later, in _valuePatternToLcOp (called from
    # generateMapItemNode and _mapKeywordVals).
    return node
def _valuePatternToLcOp(self, val):
# Here we convert the string values supported by Sigma that
# can include wildcards into either proper values (string or int)
# or into altered values to be functionally equivalent using
# a few different LC D&R rule operators.
# No point evaluating non-strings.
if not isinstance(val, str):
return ("is", str(val) if self._isAllStringValues else val)
# Is there any wildcard in this string? If not, we can short circuit.
if "*" not in val and "?" not in val:
return ("is", val)
# Now we do a small optimization for the shortcut operators
# available in LC. We try to see if the wildcards are around
# the main value, but NOT within. If that's the case we can
# use the "starts with", "ends with" or "contains" operators.
isStartsWithWildcard = False
isEndsWithWildcard = False
tmpVal = val
if tmpVal.startswith("*"):
isStartsWithWildcard = True
tmpVal = tmpVal[1:]
if tmpVal.endswith("*") and not (tmpVal.endswith("\\*") and not tmpVal.endswith("\\\\*")):
isEndsWithWildcard = True
if tmpVal.endswith("\\\\*"):
# An extra \ had to be there so it didn't escapte the
# *, but since we plan on removing the *, we can also
# remove one \.
tmpVal = tmpVal[:-2]
else:
tmpVal = tmpVal[:-1]
# Check to see if there are any other wildcards. If there are
# we cannot use our shortcuts.
if "*" not in tmpVal and "?" not in tmpVal:
if isStartsWithWildcard and isEndsWithWildcard:
return ("contains", tmpVal)
if isStartsWithWildcard:
return ("ends with", tmpVal)
if isEndsWithWildcard:
return ("starts with", tmpVal)
# This is messy, but it is accurate in generating a RE based on
# the simplified wildcard system, while also supporting the
# escaping of those wildcards.
segments = []
tmpVal = val
while True:
nEscapes = 0
for i in range(len(tmpVal)):
# We keep a running count of backslash escape
# characters we see so that if we meet a wildcard
# we can tell whether the wildcard is escaped
# (with odd number of escapes) or if it's just a
# backslash literal before a wildcard (even number).
if "\\" == tmpVal[i]:
nEscapes += 1
continue
if "*" == tmpVal[i]:
if 0 == nEscapes:
segments.append(re.escape(tmpVal[:i]))
segments.append(".*")
elif nEscapes % 2 == 0:
segments.append(re.escape(tmpVal[:i - nEscapes]))
segments.append(tmpVal[i - nEscapes:i])
segments.append(".*")
else:
segments.append(re.escape(tmpVal[:i - nEscapes]))
segments.append(tmpVal[i - nEscapes:i + 1])
tmpVal = tmpVal[i + 1:]
break
if "?" == tmpVal[i]:
if 0 == nEscapes:
segments.append(re.escape(tmpVal[:i]))
segments.append(".")
elif nEscapes % 2 == 0:
segments.append(re.escape(tmpVal[:i - nEscapes]))
segments.append(tmpVal[i - nEscapes:i])
segments.append(".")
else:
segments.append(re.escape(tmpVal[:i - nEscapes]))
segments.append(tmpVal[i - nEscapes:i + 1])
tmpVal = tmpVal[i + 1:]
break
nEscapes = 0
else:
segments.append(re.escape(tmpVal))
break
val = ''.join(segments)
return ("matches", val)
def _mapKeywordVals(self, values):
# This function ensures that the list of values passed
# are proper D&R operations, if they are strings it indicates
# they were requested as keyword matches. We only support
# keyword matches when specified in the config. We generally just
# map them to the most common field in LC that makes sense.
mapped = []
for val in values:
# Non-keywords are just passed through.
if not isinstance(val, str):
mapped.append(val)
continue
if self._keywordField is None:
raise NotImplementedError("Full-text keyboard searches not supported.")
# This seems to be indicative only of "keywords" which are mostly
# representative of full-text searches. We don't suport that but
# in some data sources we can alias them to an actual field.
op, newVal = self._valuePatternToLcOp(val)
newOp = {
"op": op,
"path": self._keywordField,
}
if op == "matches":
newOp["re"] = newVal
else:
newOp["value"] = newVal
mapped.append(newOp)
return mapped
@@ -15,12 +15,28 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
from functools import wraps
from .base import SingleTextQueryBackend
from .exceptions import NotSupportedError
def wrapper(method):
    """Decorator for map-item generators: strips any ".keyword"
    suffix from the field name and suppresses (returns None for)
    fields listed in the backend's skip_fields set."""
    @wraps(method)
    def _impl(self, method_args):
        key, value, *_ = method_args
        # Normalize Elastic-style ".keyword" field names.
        if '.keyword' in key:
            key = key.split('.keyword')[0]
        # Skipped fields produce no output at all.
        if key in self.skip_fields:
            return None
        return method(self, method_args)
    return _impl
class WindowsDefenderATPBackend(SingleTextQueryBackend):
"""Converts Sigma rule into Windows Defender ATP Hunting Queries."""
identifier = "wdatp"
"""Converts Sigma rule into Microsoft Defender ATP Hunting Queries."""
identifier = "mdatp"
active = True
config_required = False
@@ -41,6 +57,16 @@ class WindowsDefenderATPBackend(SingleTextQueryBackend):
mapExpression = "%s == %s"
mapListsSpecialHandling = True
mapListValueExpression = "%s in %s"
skip_fields = {
"Description",
"_exists_",
"FileVersion",
"Product",
"Company",
"ParentProcessName",
"ParentCommandLine"
}
def __init__(self, *args, **kwargs):
"""Initialize field mappings"""
@@ -52,11 +78,12 @@ class WindowsDefenderATPBackend(SingleTextQueryBackend):
# (replacement, ): Replaces field occurrence with static string
"AccountName" : (self.id_mapping, self.default_value_mapping),
"CommandLine" : ("ProcessCommandLine", self.default_value_mapping),
"ComputerName" : (self.id_mapping, self.default_value_mapping),
"DeviceName" : (self.id_mapping, self.default_value_mapping),
"DestinationHostname" : ("RemoteUrl", self.default_value_mapping),
"DestinationIp" : ("RemoteIP", self.default_value_mapping),
"DestinationIsIpv6" : ("RemoteIP has \":\"", ),
"DestinationPort" : ("RemotePort", self.default_value_mapping),
"Protocol" : ("RemoteProtocol", self.default_value_mapping),
"Details" : ("RegistryValueData", self.default_value_mapping),
"EventType" : ("ActionType", self.default_value_mapping),
"Image" : ("FolderPath", self.default_value_mapping),
@@ -137,20 +164,21 @@ class WindowsDefenderATPBackend(SingleTextQueryBackend):
self.service = None
if (self.category, self.product, self.service) == ("process_creation", "windows", None):
self.table = "ProcessCreationEvents"
self.table = "DeviceProcessEvents"
elif (self.category, self.product, self.service) == (None, "windows", "powershell"):
self.table = "MiscEvents"
self.table = "DeviceEvents"
self.orToken = ", "
return super().generate(sigmaparser)
def generateBefore(self, parsed):
if self.table is None:
raise NotSupportedError("No WDATP table could be determined from Sigma rule")
if self.table == "MiscEvents" and self.service == "powershell":
raise NotSupportedError("No MDATP table could be determined from Sigma rule")
if self.table == "DeviceEvents" and self.service == "powershell":
return "%s | where tostring(extractjson('$.Command', AdditionalFields)) in~ " % self.table
return "%s | where " % self.table
@wrapper
def generateMapItemNode(self, node):
"""
ATP queries refer to event tables instead of Windows logging event identifiers. This method catches conditions that refer to this field
@@ -165,26 +193,26 @@ class WindowsDefenderATPBackend(SingleTextQueryBackend):
if self.product == "windows":
if self.service == "sysmon" and value == 1 \
or self.service == "security" and value == 4688: # Process Execution
self.table = "ProcessCreationEvents"
self.table = "DeviceProcessEvents"
return None
elif self.service == "sysmon" and value == 3: # Network Connection
self.table = "NetworkCommunicationEvents"
self.table = "DeviceNetworkEvents"
return None
elif self.service == "sysmon" and value == 7: # Image Load
self.table = "ImageLoadEvents"
self.table = "DeviceImageLoadEvents"
return None
elif self.service == "sysmon" and value == 8: # Create Remote Thread
self.table = "MiscEvents"
self.table = "DeviceEvents"
return "ActionType == \"CreateRemoteThreadApiCall\""
elif self.service == "sysmon" and value == 11: # File Creation
self.table = "FileCreationEvents"
self.table = "DeviceFileEvents"
return None
elif self.service == "sysmon" and value == 13 \
or self.service == "security" and value == 4657: # Set Registry Value
self.table = "RegistryEvents"
self.table = "DeviceRegistryEvents"
return "ActionType == \"RegistryValueSet\""
elif self.service == "security" and value == 4624:
self.table = "LogonEvents"
self.table = "DeviceLogonEvents"
return None
elif type(value) in (str, int): # default value processing
try:
+43 -2
View File
@@ -16,7 +16,7 @@
import re
import sigma
from .base import SingleTextQueryBackend
from .base import SingleTextQueryBackend, CorelightQueryBackend
from .mixins import MultiRuleOutputMixin
class SplunkBackend(SingleTextQueryBackend):
@@ -72,6 +72,7 @@ class SplunkBackend(SingleTextQueryBackend):
def generate(self, sigmaparser):
"""Method is called for each sigma rule and receives the parsed rule (SigmaParser)"""
columns = list()
mapped =None
try:
for field in sigmaparser.parsedyaml["fields"]:
mapped = sigmaparser.config.get_fieldmapping(field).resolve_fieldname(field, sigmaparser)
@@ -160,7 +161,7 @@ class SplunkXMLBackend(SingleTextQueryBackend, MultiRuleOutputMixin):
query = self.generateQuery(parsed)
if query is not None:
self.queries += self.panel_pre
self.queries += self.getRuleName(sigmaparser)
self.queries += sigmaparser.parsedyaml.get("title") or ""
self.queries += self.panel_inf
query = query.replace("<", "&lt;")
query = query.replace(">", "&gt;")
@@ -170,3 +171,43 @@ class SplunkXMLBackend(SingleTextQueryBackend, MultiRuleOutputMixin):
def finalize(self):
    # Called once after all rules have been generated: append the
    # stored suffix (presumably the closing dashboard XML — see
    # dash_suf) and return the complete accumulated document.
    self.queries += self.dash_suf
    return self.queries
class SplunkCorelightBackend(CorelightQueryBackend, SplunkBackend):
    """Splunk backend variant for Corelight data: combines the
    CorelightQueryBackend field handling with the standard
    SplunkBackend query generation."""
    identifier = "corelight_splunk"
class CrowdStrikeBackend(SplunkBackend):
    """Converts Sigma rule into CrowdStrike Search Processing Language (SPL)."""
    identifier = "crowdstrike"

    def generate(self, sigmaparser):
        """Generate a CrowdStrike SPL query from a parsed Sigma rule.

        Only Windows process-creation data is supported (sysmon
        EventID 1 or the process_creation category). Rules using a
        field or log source outside the configured field mappings are
        rejected with NotImplementedError.
        """
        # Guard against a missing "logsource" section so we raise the
        # intended NotImplementedError instead of an AttributeError.
        lgs = sigmaparser.parsedyaml.get("logsource") or {}
        if lgs.get("product") == "windows" and (lgs.get("service") == "sysmon" or lgs.get("category") == "process_creation"):
            fieldmappings = sigmaparser.config.fieldmappings
            detections = sigmaparser.definitions
            all_fields = dict()
            for det in detections.values():
                for field, value in det.items():
                    # Strip any Sigma modifier (e.g. "field|contains").
                    if "|" in field:
                        field = field.split("|")[0]
                    # Direct membership test instead of scanning all keys.
                    if field in fieldmappings.keys():
                        # A sysmon EventID is only acceptable when it
                        # denotes process creation (EventID 1).
                        if field == "EventID" and str(value) == str(1) and lgs.get("service") == "sysmon":
                            all_fields.update(det)
                        elif field != "EventID":
                            all_fields.update(det)
                        else:
                            raise NotImplementedError("Not supported fields!")
                    else:
                        raise NotImplementedError("Not supported fields!")
            # Keep only the output table fields that actually map to
            # CrowdStrike fields.
            table_fields = sigmaparser.parsedyaml.get("fields", [])
            sigmaparser.parsedyaml["fields"] = [fl for fl in table_fields if fl in fieldmappings.keys()]
            return super().generate(sigmaparser)
        else:
            raise NotImplementedError("Not supported logsources!")