From fb9c5841f4fde53437bc01b764060fdf63bf52ea Mon Sep 17 00:00:00 2001 From: vh Date: Fri, 8 May 2020 13:41:52 +0300 Subject: [PATCH 1/5] Added Humio, Crowdstrike, Corelight --- tools/config/ala.yml | 101 ++ tools/config/arcsight-zeek.yml | 1053 ++++++++++++++++ tools/config/crowdstrike.yml | 19 + tools/config/ecs-dns.yml | 69 ++ tools/config/ecs-proxy.yml | 191 ++- tools/config/ecs-zeek-corelight.yml | 1182 ++++++++++++++++++ tools/config/elk-defaultindex-filebeat.yml | 2 + tools/config/elk-defaultindex-logstash.yml | 2 + tools/config/elk-defaultindex.yml | 3 + tools/config/elk-linux.yml | 15 + tools/config/elk-windows.yml | 30 + tools/config/elk-winlogbeat-sp.yml | 95 ++ tools/config/elk-winlogbeat.yml | 94 ++ tools/config/filebeat-zeek-ecs.yml | 468 +++++++ tools/config/humio.yml | 97 ++ tools/config/logstash-zeek-default-json.yml | 349 ++++++ tools/config/powershell-windows-all.yml | 62 + tools/config/splunk-zeek.yml | 423 ++++++- tools/config/winlogbeat-modules-enabled.yml | 30 +- tools/sigma/backends/ala.py | 225 +++- tools/sigma/backends/arcsight.py | 11 +- tools/sigma/backends/base.py | 32 + tools/sigma/backends/carbonblack.py | 97 +- tools/sigma/backends/discovery.py | 2 +- tools/sigma/backends/elasticsearch.py | 276 ++++- tools/sigma/backends/humio.py | 160 +++ tools/sigma/backends/limacharlie.py | 1213 ++++++++++--------- tools/sigma/backends/{wdatp.py => mdatp.py} | 56 +- tools/sigma/backends/splunk.py | 45 +- 29 files changed, 5649 insertions(+), 753 deletions(-) create mode 100644 tools/config/ala.yml create mode 100644 tools/config/arcsight-zeek.yml create mode 100644 tools/config/crowdstrike.yml create mode 100644 tools/config/ecs-dns.yml create mode 100644 tools/config/ecs-zeek-corelight.yml create mode 100644 tools/config/elk-defaultindex-filebeat.yml create mode 100644 tools/config/elk-defaultindex-logstash.yml create mode 100644 tools/config/elk-defaultindex.yml create mode 100644 tools/config/elk-linux.yml create mode 100644 
tools/config/elk-windows.yml create mode 100644 tools/config/elk-winlogbeat-sp.yml create mode 100644 tools/config/elk-winlogbeat.yml create mode 100644 tools/config/filebeat-zeek-ecs.yml create mode 100644 tools/config/humio.yml create mode 100644 tools/config/logstash-zeek-default-json.yml create mode 100644 tools/config/powershell-windows-all.yml create mode 100644 tools/sigma/backends/humio.py rename tools/sigma/backends/{wdatp.py => mdatp.py} (87%) diff --git a/tools/config/ala.yml b/tools/config/ala.yml new file mode 100644 index 000000000..1f4dd8ffc --- /dev/null +++ b/tools/config/ala.yml @@ -0,0 +1,101 @@ +title: Azure Sentinel +order: 20 +backends: + - ala + - ala-rule +fieldmappings: + ComputerName: Computer + Event-ID: EventID + Event_ID: EventID + eventId: EventID + event_id: EventID + event-id: EventID + eventid: EventID + hashes: Hashes + file_hash: Hashes + url.query: URL + resource.URL: URL + src_ip: SourceIp + source.ip: SourceIp + FileName: TargetFilename + dst_ip: DestinationIP + destination.ip: DestinationIP + event_data.AccessMask: AccessMask + event_data.AllowedToDelegateTo: AllowedToDelegateTo + event_data.AttributeLDAPDisplayName: AttributeLDAPDisplayName + event_data.AuditPolicyChanges: AuditPolicyChanges + event_data.AuthenticationPackageName: AuthenticationPackageName + event_data.CallingProcessName: CallingProcessName + event_data.CallTrace: CallTrace + event_data.CommandLine: CommandLine + Commandline: CommandLine + cmd: CommandLine + event_data.ComputerName: ComputerName + event_data.CurrentDirectory: CurrentDirectory + event_data.Description: Description + event_data.DestinationHostname: DestinationHostname + event_data.DestinationIp: DestinationIp + event_data.DestinationPort: DestinationPort + event_data.Details: Details + event_data.EngineVersion: EngineVersion + event_data.EventType: EventType + event_data.FailureCode: FailureCode + event_data.FileName: FileName + event_data.GrantedAccess: GrantedAccess + event_data.GroupName: 
GroupName + event_data.GroupSid: GroupSid + event_data.Hashes: Hashes + event_data.HiveName: HiveName + event_data.HostVersion: HostVersion + Image: + service=security: Process + category=process_creation: NewProcessName + default: Image + event_data.Image: + service=security: Process + category=process_creation: NewProcessName + default: Image + event_data.ImageLoaded: ImageLoaded + event_data.ImagePath: ImagePath + event_data.Imphash: Imphash + event_data.IpAddress: IpAddress + event_data.KeyLength: KeyLength + event_data.LogonProcessName: LogonProcessName + event_data.LogonType: LogonType + event_data.NewProcessName: NewProcessName + event_data.ObjectClass: ObjectClass + event_data.ObjectName: ObjectName + event_data.ObjectType: ObjectType + event_data.ObjectValueName: ObjectValueName + event_data.ParentCommandLine: ParentCommandLine + event_data.ParentImage: + category=process_creation: ParentProcessName + default: ParentImage + ParentImage: + category=process_creation: ParentProcessName + default: ParentImage + event_data.ParentProcessName: ParentProcessName + event_data.Path: Path + event_data.PipeName: PipeName + event_data.ProcessCommandLine: ProcessCommandLine + event_data.ProcessName: ProcessName + event_data.Properties: Properties + event_data.SecurityID: SecurityID + event_data.ServiceFileName: ServiceFileName + event_data.ServiceName: ServiceName + event_data.ShareName: ShareName + event_data.Signature: Signature + event_data.Source: Source + event_data.SourceImage: SourceImage + event_data.StartModule: StartModule + event_data.Status: Status + event_data.SubjectUserName: SubjectUserName + event_data.SubjectUserSid: SubjectUserSid + event_data.TargetFilename: TargetFilename + event_data.TargetImage: TargetImage + event_data.TargetObject: TargetObject + event_data.TicketEncryptionType: TicketEncryptionType + event_data.TicketOptions: TicketOptions + event_data.User: User + event_data.WorkstationName: WorkstationName + diff --git 
a/tools/config/arcsight-zeek.yml b/tools/config/arcsight-zeek.yml new file mode 100644 index 000000000..e27316b45 --- /dev/null +++ b/tools/config/arcsight-zeek.yml @@ -0,0 +1,1053 @@ +title: ArcSight Corelight Zeek and Corelight Opensource Zeek Configuration +order: 20 +backends: + - arcsight + - arcsight-esm +logsources: + zeek: + product: zeek + conditions: + deviceVendor: Bro + zeek-category-accounting: + category: accounting + rewrite: + product: zeek + service: syslog + zeek-category-firewall: + category: firewall + conditions: + deviceEventCategory: conn + zeek-category-dns: + category: dns + conditions: + deviceEventCategory: dns + zeek-category-proxy: + category: proxy + rewrite: + product: zeek + service: http + zeek-category-webserver: + category: webserver + conditions: + deviceEventCategory: http + rewrite: + product: zeek + service: http + zeek-conn: + product: zeek + service: conn + conditions: + deviceEventCategory: conn + zeek-conn_long: + product: zeek + service: conn_long + conditions: + deviceEventCategory: conn_long + zeek-dce_rpc: + product: zeek + service: dce_rpc + conditions: + deviceEventCategory: dce_rpc + zeek-dns: + product: zeek + service: dns + conditions: + deviceEventCategory: dns + zeek-dnp3: + product: zeek + service: dnp3 + conditions: + deviceEventCategory: dnp3 + zeek-dpd: + product: zeek + service: dpd + conditions: + deviceEventCategory: dpd + zeek-files: + product: zeek + service: files + conditions: + deviceEventCategory: files + zeek-ftp: + product: zeek + service: ftp + conditions: + deviceEventCategory: ftp + zeek-gquic: + product: zeek + service: gquic + conditions: + deviceEventCategory: gquic + zeek-http: + product: zeek + service: http + conditions: + deviceEventCategory: http + zeek-http2: + product: zeek + service: http2 + conditions: + deviceEventCategory: http2 + zeek-intel: + product: zeek + service: intel + conditions: + deviceEventCategory: intel + zeek-irc: + product: zeek + service: irc + conditions: + 
deviceEventCategory: irc + zeek-kerberos: + product: zeek + service: kerberos + conditions: + deviceEventCategory: kerberos + zeek-known_certs: + product: zeek + service: known_certs + conditions: + deviceEventCategory: known_certs + zeek-known_hosts: + product: zeek + service: known_hosts + conditions: + deviceEventCategory: known_hosts + zeek-known_modbus: + product: zeek + service: known_modbus + conditions: + deviceEventCategory: known_modbus + zeek-known_services: + product: zeek + service: known_services + conditions: + deviceEventCategory: known_services + zeek-modbus: + product: zeek + service: modbus + conditions: + deviceEventCategory: modbus + zeek-modbus_register_change: + product: zeek + service: modbus_register_change + conditions: + deviceEventCategory: modbus_register_change + zeek-mqtt_connect: + product: zeek + service: mqtt_connect + conditions: + deviceEventCategory: mqtt_connect + zeek-mqtt_publish: + product: zeek + service: mqtt_publish + conditions: + deviceEventCategory: mqtt_publish + zeek-mqtt_subscribe: + product: zeek + service: mqtt_subscribe + conditions: + deviceEventCategory: mqtt_subscribe + zeek-mysql: + product: zeek + service: mysql + conditions: + deviceEventCategory: mysql + zeek-notice: + product: zeek + service: notice + conditions: + deviceEventCategory: notice + zeek-ntlm: + product: zeek + service: ntlm + conditions: + deviceEventCategory: ntlm + zeek-ntp: + product: zeek + service: ntp + conditions: + deviceEventCategory: ntp + zeek-ocsp: + product: zeek + service: ocsp + conditions: + deviceEventCategory: ocsp + zeek-pe: + product: zeek + service: pe + conditions: + deviceEventCategory: pe + zeek-pop3: + product: zeek + service: pop3 + conditions: + deviceEventCategory: pop3 + zeek-radius: + product: zeek + service: radius + conditions: + deviceEventCategory: radius + zeek-rdp: + product: zeek + service: rdp + conditions: + deviceEventCategory: rdp + zeek-rfb: + product: zeek + service: rfb + conditions: + 
deviceEventCategory: rfb + zeek-sip: + product: zeek + service: sip + conditions: + deviceEventCategory: sip + zeek-smb_files: + product: zeek + service: smb_files + conditions: + deviceEventCategory: smb_files + zeek-smb_mapping: + product: zeek + service: smb_mapping + conditions: + deviceEventCategory: smb_mapping + zeek-smtp: + product: zeek + service: smtp + conditions: + deviceEventCategory: smtp + zeek-smtp_links: + product: zeek + service: smtp_links + conditions: + deviceEventCategory: smtp_links + zeek-snmp: + product: zeek + service: snmp + conditions: + deviceEventCategory: snmp + zeek-socks: + product: zeek + service: socks + conditions: + deviceEventCategory: socks + zeek-software: + product: zeek + service: software + conditions: + deviceEventCategory: software + zeek-ssh: + product: zeek + service: ssh + conditions: + deviceEventCategory: ssh + zeek-ssl: + product: zeek + service: ssl + conditions: + deviceEventCategory: tls + zeek-tls: # In case people call it TLS even though orig log is called ssl, but dataset is tls so may cause confusion so cover that + product: zeek + service: tls + conditions: + deviceEventCategory: tls + zeek-syslog: + product: zeek + service: syslog + conditions: + deviceEventCategory: syslog + zeek-tunnel: + product: zeek + service: tunnel + conditions: + deviceEventCategory: tunnel + zeek-traceroute: + product: zeek + service: traceroute + conditions: + deviceEventCategory: traceroute + zeek-weird: + product: zeek + service: weird + conditions: + deviceEventCategory: weird + zeek-x509: + product: zeek + service: x509 + conditions: + deviceEventCategory: x509 + zeek-ip_search: + product: zeek + service: network + conditions: + deviceEventCategory: + - conn + - conn_long + - dce_rpc + - dhcp + - dnp3 + - dns + - ftp + - gquic + - http + - irc + - kerberos + - modbus + - mqtt_connect + - mqtt_publish + - mqtt_subscribe + - mysql + - ntlm + - ntp + - radius + - rfb + - sip + - smb_files + - smb_mapping + - smtp + - smtp_links 
+ - snmp + - socks + - ssh + - tls #SSL + - tunnel + - weird +fieldmappings: + cs-uri-extension: fileType + cs-uri-path: filePath + s-dns: + - destinationDnsDomain + - destinationHost + # All Logs Applied Mapping & Taxonomy + clientip: sourceAddress + dst: destinationAddress + dst_ip: destinationAddress + dst_port: destinationPort + host: requestHost + #inner_vlan: + mac: sourceMacAddress + mime_type: fileType + network_application: applicationProtocol + #network_community_id: + network_protocol: transportProtocol + password: message + port_num: sourcePort + proto: transportProtocol + #result: + #rtt: + server_name: destinationHostName + src: sourceAddress + src_ip: sourceAddress + src_port: sourcePort + #success: + uri: + - requestUrl + - requestUrlQuery + user: sourceUserName + username: sourceUserName + user_agent: + - deviceCustomString5 + - requestClientApplication + #vlan: + # DNS matching Taxonomy & DNS Category + answer: message + #question_length: + record_type: deviceCustomString1 + #parent_domain: + # HTTP matching Taxonomy & Web/Proxy Category + cs-bytes: bytesOut + cs-cookie: message + r-dns: + - destinationDnsDomain + - destinationHost + sc-bytes: bytesIn + sc-status: message + c-uri: + - requestUrl + - requestUrlQuery + c-uri-extension: fileType + c-uri-query: + - requestUrl + - requestUrlQuery + c-uri-stem: + - requestUrl + - requestUrlQuery + c-useragent: + - deviceCustomString5 + - requestClientApplication + cs-host: + - destinationDnsDomain + - destinationHost + cs-method: requestMethod + cs-referrer: + - deviceCustomString4 + - requestContext + cs-version: message + # All log UIDs + #cert_chain_fuids: + #client_cert_chain_fuids: + #client_cert_fuid: + #conn_uids: + #fid: + #fuid: + #fuids: + #id: + #orig_fuids: + #parent_fuid: + #related_fuids: + #resp_fuids: + #server_cert_fuid: + #tunnel_parents: + #uid: + #uids: + #uuid: + # Overlapping fields/mappings (aka: shared fields) + action: + - 'deviceAction' + #service=smb_files: + #service=mqtt: + 
#service=tunnel: + addl: + - 'message' + #service=dns: + #service=weird: + analyzer: + - 'applicationProtocol' + - 'name' + #service=dpd: + #service=files: + arg: + - 'message' + #auth: + #service=rfb: #RFB does not exist in newer logs, so skipping to cover dns.auth + cipher: + - 'deviceCustomString4' + - 'message' + #service=kerberos: + #service=ssl: + client: + - 'deviceCustomString5' + #service=kerberos: + #service=ssh: + command: + - 'message' + #service=pop3: + #service=ftp: + #service=irc: + date: + #service=sip: + #service=smtp: + duration: + - 'deviceCustomString4' + #service=conn: + #service=files: + #service=snmp: + from: + - 'message' + #service=kerberos: + #service=smtp: + #is_orig: + #service=file: + #service=pop3: + #local_orig: + #service=conn + #service=files + method: + - 'requestMethod' + #service=http: + #service=sip: + msg: + - 'message' + #service=notice: + #service=pop3: + name: + - 'name' + #service=smb_files: + #service=software: + #service=weird: + path: + - 'filePath' + #service=smb_files: + #service=smb_mapping: + #service=smtp: + reply_msg: + - 'message' + #service=ftp: + #service=radius: + reply_to: + - 'message' + #service=sip: + #service=smtp: + response_body_len: + - 'bytesOut' + #service=http: + #service=sip: + request_body_len: + - 'bytesIn' + #service=http: + #service=sip: + service: + - 'applicationProtocol' + #service=kerberos: + #service=smb_mapping: + status: + - 'message' + #service=pop3: + #service=mqtt: + #service=socks: + status_msg: + - 'message' + #subject: + - 'message' + #service=known_certs: + #service=sip: + #service=smtp: + #service=ssl: + trans_depth: + - 'deviceCustomNumber1' + #service=http: + #service=sip: + #service=smtp: + version: + - 'message' + - 'deviceCustomString2' + #service=gquic: + #service=ntp: + #service=socks: + #service=snmp: + #service=ssh: + #service=tls: + # Conn and Conn Long + #cache_add_rx_ev: + #cache_add_rx_mpg: + #cache_add_rx_new: + #cache_add_tx_ev: + #cache_add_tx_mpg: + 
#cache_del_mpg: + #cache_entries: + conn_state: deviceSeverity + #corelight_shunted: + #duration: deviceCustomString4 + #history: + #id.orig_h.name_src: + #id.orig_h.names_vals: + #id.resp_h.name_src: + #id.resp_h.name_vals: + #local_orig: + #local_resp: + missed_bytes: deviceCustomNumber1 + orig_bytes: bytesOut + #orig_cc: + orig_ip_bytes: deviceCustomNumber2 + orig_l2_addr: sourceMacAddress + #orig_pkts: + resp_bytes: bytesIn + #resp_cc: + resp_ip_bytes: deviceCustomNumber3 + resp_l2_addr: destinationMacAddress + #resp_pkts: + # DCE-RPC Specific + endpoint: message + named_pipe: message + operation: message + #rtt: + # DHCP + domain: message + host_name: message + lease_time: deviceCustomString4 + agent_remote_id: message + assigned_addr: message + circuit_id: message + client_message: message + client_software: message + client_fqdn: message + #mac:sourceMacAddress + msg_orig: message + msg_types: message + requested_addr: message + server_addr: message + server_message: message + server_software: message + subscriber_id: message + # DNS + AA: message + #addl: message + auth: message + answers: message + TTLs: message + RA: message + RD: message + rejected: eventOutcome + TC: message + Z: message + qclass: message + qclass_name: deviceCustomString4 + qtype: deviceEventClassId + qtype_name: + - deviceCustomString1 + - name + query: destinationDnsDomain + rcode_name: message + rcode: message + rtt: message + trans_id: deviceCustomNumber1 + # DNP3 + fc_reply: message + fc_request: message + iin: message + # DPD + #analyzer: + failure_reason: message + packet_segment: message + # Files + rx_hosts: destinationHostName + tx_hosts: sourceHostName + #analyzer: + #depth: + #duration: + #extracted: + #extracted_cutoff: + #extracted_size: + #entropy: + md5: fileHash + sha1: fileHash + sha256: fileHash + #is_orig: + #local_orig: + #missing_bytes: + filename: fileName + overflow_bytes: bytesOut + #seen_bytes: + source: filePath + total_bytes: bytesIn + #timedout: + # 
GQUIC/QUIC + cyu: message + cyutags: message + #server_name: message + tag_count: message + #user_agent: deviceCustomString5 + #version: + # FTP + #arg: message + #command: message + cwd: message + data_channel.orig_h: message + data_channel.passive: eventOutcome + data_channel.resp_h: message + data_channel.resp_p: deviceCustomNumber1 + passive: message + file_size: fileSize + #mime_type: fileType + #password: message + reply_code: deviceEventClassId + #reply_msg: message + #user: sourceUserName + # HTTP + client_header_names: message + cookie_vars: message + flash_version: message + info_code: message + info_msg: message + omniture: message + orig_filenames: fileName + orig_mime_types: fileType + origin: message + #password: message + post_body: message + proxied: message + referrer: + - deviceCustomString4 + - requestContext + resp_filenames: fileName + resp_mime_types: fileType + server_header_names: message + status_code: deviceSeverity + #status_msg: message + #trans_depth: + uri_vars: message + #user_agent: deviceCustomString5 + #username: sourceUserName + # Intel + file_mime_type: message + file_desc: message + #host: + matched: message + indicator: message + indicator_type: message + node: message + where: message + sources: message + # IRC + dcc_file_name: fileName + dcc_file_size: fileSize + dcc_mime_type: fileType + #command: + nick: message + #user: + value: message + # Kerberos + auth_ticket: message + #cipher: message + #client: message + client_cert_subject: message + error_code: message + error_msg: message + #from: message + forwardable: message + new_ticket: message + renewable: message + request_type: message + server_cert_subject: message + #service: applicationProtocol + #success: + till: message + # Known_Certs + #host: sourceAddress + issuer_subject: deviceCustomString3 + #port_num: sourcePort + serial: deviceCustomString4 + #subject: message + # Known_Modbus + #host: + device_type: message + # Known_Services + port_proto: transport + 
#port_num: sourcePort + # Modbus All + delta: message + new_val: message + old_val: message + register: message + # Modbus + func: message + exception: message + track_address: message + # ModBus_Register_Change + #delta: message + #new_val: message + #old_val: message + #register: message + # MQTT_Connect , MQTT_Publish, MQTT_Subscribe + ack: message + #action: message + client_id: message + connect_status: message + from_client: message + granted_qos_level: message + payload: message + payload_len: message + proto_name: message + proto_version: message + qos: message + qos_levels: message + retain: message + #status: message + topic: message + topics: message + will_payload: message + will_topic: message + # MYSQL + #arg: message + cmd: message + response: message + rows: message + #success: + # Notice + actions: deviceEventClassId + #dropped: + #dst: destinationAddress + email_body_sections: message + email_delay_tokens: message + identifier: message + #msg: + n: message + note: message + p: destinationPort + peer_descr: deviceCustomString5 + peer_name: deviceCustomString4 + #proto: transport + #src: sourceAddress + sub: message + subpress_for: deviceCustomFloatingPoint1 + # NTLM + domainname: message + hostname: message + #username: sourceUserName + server_nb_computer_name: message + server_tree_name: message + #success: + server_dns_computer_name: message + # NTP + mode: message + num_exts: message + org_time: message + poll: message + precision: message + rec_time: message + ref_id: message + ref_time: message + root_delay: message + root_disp: message + stratum: message + #version: + xmt_time: message + # OCSP + certStatus: message + hashAlgorithm: message + issuerKeyHash: message + issuerNameHash: message + nextUpdate: message + revokereason: message + revoketime: message + serialNumber: message + thisUpdate: message + # PE + compile_ts: message + has_cert_table: message + has_debug_data: message + has_import_table: message + has_export_table: message + 
is_64bit: message + is_exe: message + machine: message + os: message + section_names: message + subsystem: message + uses_aslr: message + uses_code_integrity: message + uses_dep: message + uses_seh: message + # POP3 + #arg: message + #command: message + current_request: message + current_response: message + data: message + failed_commands: message + has_client_activity: message + #is_orig: message + #msg: message + #password: + pending: message + #status: message + successful_commands: message + #username: sourceUserName + # Radius + connect_info: message + framed_addr: message + #mac:sourceMacAddress + #reply_msg: message + #result: + ttl: message + tunnel_client: message + #username: sourceUserName + # RDP + cert_count: message + cert_permanent: message + cert_type: message + client_build: message + client_dig_product_id: message + client_name: message + cookie: message + desktop_height: message + desktop_width: message + encryption_level: message + encryption_method: message + keyboard_layout: message + requested_color_depth: message + #result: + security_protocol: message + ssl: message + # RFB + #auth: + authentication_method: message + client_major_version: message + client_minor_version: message + desktop_name: message + height: message + server_major_version: message + server_minor_version: message + share_flag: message + width: message + # SIP + call_id: message + content_type: message + #date: message + #method: requestMethod + #reply_to: message + #request_body_len: message + request_from: message + request_path: message + request_to: message + #response_body_len: message + response_from: message + response_path: message + response_to: message + seq: message + #status_code: + #status_msg: message + #subject: message + #trans_depth: deviceCustomNumber1 + #uri: + warning: message + #user_agent: deviceCustomString5 + # SMB_Files + #action: + #name: fileName + #path: filePath + prev_name: message + size: fileSize + times_accessed: message + times_changed: 
message + times_created: message + times_modified: message + # SMB_Mapping + native_file_system: message + #path: filePath + share_type: message + #service: + # SMTP + cc: message + #date: message + first_received: message + #from: + helo: message + in_reply_to: message + is_webmail: message + last_reply: message + mailfrom: sourceUserName + #msg_id: message + #path: message + rcptto: message + #reply_to: message + second_received: message + #subject: message + tls: message + to: message + #trans_depth: deviceCustomNumber1 + x_originating_ip: message + #user_agent: deviceCustomString5 + # SMTP_Links + #host: + #uri: + # SNMP + #duration: + community: message + display_string: message + get_bulk_requests: message + get_requests: message + set_requests: message + up_since: message + #version: + # Socks + #password: message + bound_host: message + bound_name: message + bound_p: message + request_host: message + request_name: message + request_p: message + #status: message + #version: message + # Software + #host: + host_p: sourcePort + version.major: deviceCustomString3 + version.minor: deviceCustomString4 + version.minor2: message + version.minor3: message + #name: + unparsed_version: message + software_type: deviceEventClassId + #url: + # SSH + #auth_attempts: + auth_success: name + cipher_alg: message + #client: deviceCustomString5 + compression_alg: message + cshka: message + direction: deviceDirection + hassh: message + hasshAlgorithms: message + hasshServer: message + hasshServerAlgorithms: message + hasshVersion: message + host_key: message + host_key_alg: message + kex_alg: message + mac_alg: message + server: deviceCustomString4 + #version: + # SSL / TLS + #cipher: deviceCustomString4 + client_issuer: deviceCustomString1 + client_subject: sourceUserName + curve: message + established: eventOutcome + issuer: deviceCustomString1 + ja3: message + ja3s: message + last_alert: message + next_protocol: message + notary: message + ocsp_status: message + 
orig_certificate_sha1: message + resp_certificate_sha1: message + resumed: message + #server_name: destinationHostName + #subject: message + valid_ct_logs: message + valid_ct_operators: message + valid_ct_operators_list: message + validation_status: message + #version: deviceCustomString2 + version_num: message + # Syslog + facility: message + severity: message + message: message + # Traceroute + #proto: transport + #dst: destinationAddress + #src: sourceAddress + # Tunnel + #action: deviceAction + tunnel_type: name + # Weird + #addl: message + #name: name + notice: message + peer: deviceCustomString4 + # X509 + basic_constraints.ca: message + basic_constraints.path_len: message + certificate.cn: message + certificate.curve: message + certificate.exponent: message + certificate.issuer: deviceCustomString3 + certificate.key_alg: message + certificate.key_length: message + certificate.key_type: message + certificate.not_valid_after: deviceCustomDate2 + certificate.not_valid_before: deviceCustomDate1 + certificate.serial: message + certificate.sig_alg: message + certificate.subject: message + certificate.version: message + logcert: message + san.dns: + - message + - destinationDnsDomain + - destinationHost + san.email: + - message + - sourceUserName + san.ip: + - message + - sourceAddress + san.uri: + - requestUrl + - requestUrlQuery \ No newline at end of file diff --git a/tools/config/crowdstrike.yml b/tools/config/crowdstrike.yml new file mode 100644 index 000000000..8a90c07e4 --- /dev/null +++ b/tools/config/crowdstrike.yml @@ -0,0 +1,19 @@ +title: Crowdstrike Windows log source conditions +order: 20 +backends: + - crowdstrike +logsources: + windows-sysmon: + product: windows + service: sysmon + conditions: + EventID: 1 + process_creation_1: + category: process_creation + product: windows + +fieldmappings: + EventID: EventID + CommandLine: Commandline + Command_Line: Commandline + Image: ImageFileName diff --git a/tools/config/ecs-dns.yml b/tools/config/ecs-dns.yml new
file mode 100644 index 000000000..d41c06398 --- /dev/null +++ b/tools/config/ecs-dns.yml @@ -0,0 +1,69 @@ +title: Elastic Common Schema mapping for proxy and webserver logs including NSM DNS logs (zeek/suricata) +order: 20 +backends: + - es-qs + - es-dsl + - elasticsearch-rule + - kibana + - xpack-watcher + - elastalert + - elastalert-dsl +# zeek-category-dns: + # category: dns + # conditions: + # event.dataset: dns +# zeek-dns: + # product: zeek + # service: dns + # conditions: + # event.dataset: dns +defaultindex: + - filebeat-* +# logsourcemerging: or +fieldmappings: + # All Logs Applied Mapping & Taxonomy + dst: + - destination.address + - destination.ip + dst_ip: + - destination.address + - destination.ip + dst_port: destination.port + src: + - source.address + - source.ip + src_ip: + - source.address + - source.ip + src_port: source.port + # DNS Taxonomy + answer: dns.answers.name + c-dns: dns.question.name + parent_domain: dns.question.registered_domain + query: dns.question.name + QueryName: dns.question.name + r-dns: dns.question.name + record_type: dns.answers.type + response: dns.answers + #question_length: + # Zeek DNS specific + AA: dns.AA + addl: dns.addl + answers: dns.answers.name + auth: dns.auth + qclass_name: dns.question.class + qclass: dns.qclass + qtype_name: dns.question.type + qtype: dns.qtype + query: dns.question.name + #question_length: labels.dns.query_length + RA: dns.RA + rcode_name: dns.response_code + rcode: dns.rcode + RD: dns.RD + rejected: dns.rejected + rtt: dns.rtt + TC: dns.TC + trans_id: dns.id + TTLs: dns.answers.ttl + Z: dns.Z diff --git a/tools/config/ecs-proxy.yml b/tools/config/ecs-proxy.yml index 9e75578c7..8dd4c4919 100644 --- a/tools/config/ecs-proxy.yml +++ b/tools/config/ecs-proxy.yml @@ -12,13 +12,198 @@ logsources: category: proxy index: filebeat-* fieldmappings: - c-uri: url.original + # All Logs Applied Mapping & Taxonomy + dst: + - destination.address + - destination.ip + dst_ip: + - destination.address + - 
destination.ip + dst_port: destination.port + src: + - source.address + - source.ip + src_ip: + - source.address + - source.ip + src_port: source.port + # Web/Proxy Taxonomy + cs-bytes: http.request.body.bytes + cs-cookie-vars: http.cookie_vars c-uri-extension: url.extension c-uri-query: url.query c-uri-stem: url.original + c-uri: url.original c-useragent: user_agent.original cs-cookie: http.cookie - cs-host: url.domain + cs-host: + - url.domain + - destination.domain cs-method: http.request.method - r-dns: url.domain + cs-referrer: http.request.referrer + cs-version: http.version + r-dns: + - destination.domain + - url.domain + sc-bytes: http.response.body.bytes sc-status: http.response.status_code + # Temporary one off rule name fields + agent.version: http.version + c-ip: + - source.address + - source.ip + clientip: + - source.address + - source.ip + clientIP: + - source.address + - source.ip + dest_domain: + - destination.domain + - url.domain + dest_ip: + - destination.address + - destination.ip + dest_port: destination.port + destination.hostname: + - destination.domain + - url.domain + DestinationAddress: + DestinationHostname: + - destination.domain + - url.domain + DestinationIp: + - destination.address + - destination.ip + DestinationIP: + - destination.address + - destination.ip + DestinationPort: destination.port + dst-ip: + - destination.address + - destination.ip + dstip: + - destination.address + - destination.ip + dstport: destination.port + Host: + - destination.domain + - url.domain + host: + - destination.domain + - url.domain + HostVersion: http.version + http_host: + - destination.domain + - url.domain + http_uri: url.original + http_url: url.original + http_user_agent: user_agent.original + http.request.url-query-params: url.original + HttpMethod: http.request.method + in_url: url.original + parent_domain: + - url.registered_domain + - destination.registered_domain + post_url_parameter: url.original + Request Url: url.original + request_url: 
url.original + request_URL: url.original + RequestUrl: url.original + response: http.response.status_code + resource.url: url.original + resource.URL: url.original + sc_status: http.response.status_code + sender_domain: + - destination.domain + - url.domain + service.response_code: http.response.status_code + source: + - source.address + - source.ip + SourceAddr: + - source.address + - source.ip + SourceAddress: + - source.address + - source.ip + SourceIP: + - source.address + - source.ip + SourceIp: + - source.address + - source.ip + SourceNetworkAddress: + - source.address + - source.ip + SourcePort: source.port + srcip: + - source.address + - source.ip + Status: http.response.status_code + status: http.response.status_code + url: url.original + URL: url.original + url_query: url.original + url.query: url.original + uri_path: url.original + user_agent: user_agent.original + user_agent.name: user_agent.original + user-agent: user_agent.original + User-Agent: user_agent.original + useragent: user_agent.original + UserAgent: user_agent.original + web_dest: + - url.domain + - destination.domain + web.dest: + - url.domain + - destination.domain + Web.dest: + - url.domain + - destination.domain + web.host: + - url.domain + - destination.domain + Web.host: + - url.domain + - destination.domain + web_method: http.request.method + Web_method: http.request.method + web.method: http.request.method + Web.method: http.request.method + web_src: + - source.address + - source.ip + web_status: http.response.status_code + Web_status: http.response.status_code + web.status: http.response.status_code + Web.status: http.response.status_code + web_uri: url.original + web_url: url.original + # Zeek HTTP as Proxy/Web + client_header_names: http.client_header_names + cookie_vars: http.cookie_vars + flash_version: http.flash_version + info_code: http.info_code + info_msg: http.info_msg + method: http.request.method + omniture: http.omniture + orig_filenames: http.orig_filenames + 
orig_mime_types: http.orig_mime_types + origin: http.origin + #password: source.user.password + post_body: http.post_body + proxied: http.proxied + referrer: http.request.referrer + request_body_len: http.request.body.bytes + resp_filenames: http.resp_filenames + resp_mime_types: http.resp_mime_types + response_body_len: http.response.body.bytes + server_header_names: http.server_header_names + status_code: http.response.status_code + status_msg: http.status_msg + trans_depth: http.trans_depth + uri_vars: http.uri_vars + username: source.user.name + version: http.version \ No newline at end of file diff --git a/tools/config/ecs-zeek-corelight.yml b/tools/config/ecs-zeek-corelight.yml new file mode 100644 index 000000000..6c3dae8b0 --- /dev/null +++ b/tools/config/ecs-zeek-corelight.yml @@ -0,0 +1,1182 @@ +title: Corelight Zeek and Corelight Opensource Zeek Elastic Common Schema (ECS) implementation +description: Uses the mappings as created by Corelight here https://github.com/corelight/ecs-mapping +order: 20 +backends: + - es-qs + - corelight_es-qs + - es-dsl + - elasticsearch-rule + - corelight_elasticsearch-rule + - kibana + - corelight_kibana + - xpack-watcher + - corelight_xpack-watcher + - elastalert + - elastalert-dsl +logsources: + zeek: + product: zeek + index: '*ecs-*' + #'*ecs-corelight*' + #'*ecs-zeek-* + zeek-category-accounting: + category: accounting + rewrite: + product: zeek + service: syslog + zeek-category-firewall: + category: firewall + conditions: + event.dataset: conn + zeek-category-dns: + category: dns + conditions: + event.dataset: dns + zeek-category-proxy: + category: proxy + rewrite: + product: zeek + service: http + zeek-category-webserver: + category: webserver + conditions: + event.dataset: http + rewrite: + product: zeek + service: http + zeek-conn: + product: zeek + service: conn + conditions: + event.dataset: conn + zeek-conn_long: + product: zeek + service: conn_long + conditions: + event.dataset: conn_long + zeek-dce_rpc: + 
product: zeek + service: dce_rpc + conditions: + event.dataset: dce_rpc + zeek-dns: + product: zeek + service: dns + conditions: + event.dataset: dns + zeek-dnp3: + product: zeek + service: dnp3 + conditions: + event.dataset: dnp3 + zeek-dpd: + product: zeek + service: dpd + conditions: + event.dataset: dpd + zeek-files: + product: zeek + service: files + conditions: + event.dataset: files + zeek-ftp: + product: zeek + service: ftp + conditions: + event.dataset: ftp + zeek-gquic: + product: zeek + service: gquic + conditions: + event.dataset: gquic + zeek-http: + product: zeek + service: http + conditions: + event.dataset: http + zeek-http2: + product: zeek + service: http2 + conditions: + event.dataset: http2 + zeek-intel: + product: zeek + service: intel + conditions: + event.dataset: intel + zeek-irc: + product: zeek + service: irc + conditions: + event.dataset: irc + zeek-kerberos: + product: zeek + service: kerberos + conditions: + event.dataset: kerberos + zeek-known_certs: + product: zeek + service: known_certs + conditions: + event.dataset: known_certs + zeek-known_hosts: + product: zeek + service: known_hosts + conditions: + event.dataset: known_hosts + zeek-known_modbus: + product: zeek + service: known_modbus + conditions: + event.dataset: known_modbus + zeek-known_services: + product: zeek + service: known_services + conditions: + event.dataset: known_services + zeek-modbus: + product: zeek + service: modbus + conditions: + event.dataset: modbus + zeek-modbus_register_change: + product: zeek + service: modbus_register_change + conditions: + event.dataset: modbus_register_change + zeek-mqtt_connect: + product: zeek + service: mqtt_connect + conditions: + event.dataset: mqtt_connect + zeek-mqtt_publish: + product: zeek + service: mqtt_publish + conditions: + event.dataset: mqtt_publish + zeek-mqtt_subscribe: + product: zeek + service: mqtt_subscribe + conditions: + event.dataset: mqtt_subscribe + zeek-mysql: + product: zeek + service: mysql + conditions: 
+ event.dataset: mysql + zeek-notice: + product: zeek + service: notice + conditions: + event.dataset: notice + zeek-ntlm: + product: zeek + service: ntlm + conditions: + event.dataset: ntlm + zeek-ntp: + product: zeek + service: ntp + conditions: + event.dataset: ntp + zeek-ocsp: + product: zeek + service: ocsp + conditions: + event.dataset: ocsp + zeek-pe: + product: zeek + service: pe + conditions: + event.dataset: pe + zeek-pop3: + product: zeek + service: pop3 + conditions: + event.dataset: pop3 + zeek-radius: + product: zeek + service: radius + conditions: + event.dataset: radius + zeek-rdp: + product: zeek + service: rdp + conditions: + event.dataset: rdp + zeek-rfb: + product: zeek + service: rfb + conditions: + event.dataset: rfb + zeek-sip: + product: zeek + service: sip + conditions: + event.dataset: sip + zeek-smb_files: + product: zeek + service: smb_files + conditions: + event.dataset: smb_files + zeek-smb_mapping: + product: zeek + service: smb_mapping + conditions: + event.dataset: smb_mapping + zeek-smtp: + product: zeek + service: smtp + conditions: + event.dataset: smtp + zeek-smtp_links: + product: zeek + service: smtp_links + conditions: + event.dataset: smtp_links + zeek-snmp: + product: zeek + service: snmp + conditions: + event.dataset: snmp + zeek-socks: + product: zeek + service: socks + conditions: + event.dataset: socks + zeek-software: + product: zeek + service: software + conditions: + event.dataset: software + zeek-ssh: + product: zeek + service: ssh + conditions: + event.dataset: ssh + zeek-ssl: + product: zeek + service: ssl + conditions: + event.dataset: tls + zeek-tls: # In case people call it TLS even though orig log is called ssl, but dataset is tls so may cause confusion so cover that + product: zeek + service: tls + conditions: + event.dataset: tls + zeek-syslog: + product: zeek + service: syslog + conditions: + event.dataset: syslog + zeek-tunnel: + product: zeek + service: tunnel + conditions: + event.dataset: tunnel + 
zeek-traceroute: + product: zeek + service: traceroute + conditions: + event.dataset: traceroute + zeek-weird: + product: zeek + service: weird + conditions: + event.dataset: weird + zeek-x509: + product: zeek + service: x509 + conditions: + event.dataset: x509 + zeek-ip_search: + product: zeek + service: network + conditions: + event.dataset: + - conn + - conn_long + - dce_rpc + - dhcp + - dnp3 + - dns + - ftp + - gquic + - http + - irc + - kerberos + - modbus + - mqtt_connect + - mqtt_publish + - mqtt_subscribe + - mysql + - ntlm + - ntp + - radius + - rfb + - sip + - smb_files + - smb_mapping + - smtp + - smtp_links + - snmp + - socks + - ssh + - tls #SSL + - tunnel + - weird +defaultindex: '*ecs-*' +fieldmappings: + # All Logs Applied Mapping & Taxonomy + dst: destination.ip + dst_ip: destination.ip + dst_port: destination.port + host: host.ip + inner_vlan: network.vlan.inner.id + mac: source.mac + mime_type: file.mime_type + network_application: network.protocol + network_community_id: network.community_id + network_protocol: network.transport + password: source.user.password + port_num: labels.known.port + proto: network.transport + result: event.outcome + rtt: event.duration + server_name: destination.domain + src: source.ip + src_ip: source.ip + src_port: source.port + success: event.outcome + uri: url.original + user: source.user.name + username: source.user.name + user_agent: user_agent.original + vlan: network.vlan.id + # DNS matching Taxonomy & DNS Category + answer: dns.answers.name + question_length: labels.dns.query_length + record_type: dns.question.type + parent_domain: dns.question.registered_domain + # HTTP matching Taxonomy & Web/Proxy Category + cs-bytes: http.request.body.bytes + cs-cookie: http.cookie_vars + r-dns: + - url.domain + - destination.domain + sc-bytes: http.response.body.bytes + sc-status: http.response.status_code + c-uri: url.original + c-uri-extension: url.extension + c-uri-query: url.query + c-uri-stem: url.original + 
c-useragent: user_agent.original + cs-host: + - url.domain + - destination.domain + cs-method: http.request.method + cs-referrer: http.request.referrer + cs-version: http.version + # All log UIDs + cert_chain_fuids: log.id.cert_chain_fuids + client_cert_chain_fuids: log.id.client_cert_chain_fuids + client_cert_fuid: log.id.client_cert_fuid + conn_uids: log.id.conn_uids + fid: log.id.fid + fuid: log.id.fuid + fuids: log.id.fuids + id: log.id.id + orig_fuids: log.id.orig_fuids + parent_fuid: log.id.parent_fuid + related_fuids: log.id.related_fuids + resp_fuids: log.id.resp_fuids + server_cert_fuid: log.id.server_cert_fuid + tunnel_parents: log.id.tunnel_parents + uid: log.id.uid + uids: log.id.uids + uuid: log.id.uuid + # Overlapping fields/mappings (aka: shared fields) + action: + #- smb.action + - '*.action' + #service=smb_files: smb.action + #service=mqtt: mqtt.action + #service=tunnel: tunnel.action + addl: + #- weird.addl + - '*.addl' + #service=dns: dns.addl + #service=weird: weird.addl + analyzer: + #- dpd.analyzer + - '*.analyzer' + #service=dpd: dpd.analyzer + #service=files: files.analyzer + arg: + #- ftp.arg + - '*.arg' + #service=ftp: ftp.arg + #service=pop3: pop3.arg + #service=mysql: mysql.arg + #auth: + #service=rfb: rfb.auth #RFB does not exist in newer logs, so skipping to cover dns.auth + cipher: + #- kerberos.cipher + - '*.cipher' + #service=kerberos: kerberos.cipher + #service=ssl: tls.cipher + client: + #- ssh.client + - '*.client' + #service=kerberos: kerberos.client + #service=ssh: ssh.client + command: + #- ftp.command + - '*.command' + #service=pop3: pop3.command + #service=ftp: ftp.command + #service=irc: irc.command + date: + #- smtp.date + - '*.date' + #service=sip: sip.date + #service=smtp: smtp.date + duration: + - event.duration + #- '*.duration' + #service=conn: event.duration + #service=files: files.duration + #service=snmp: event.duration + from: + #- smtp.from + - '*.from' + #service=kerberos: kerberos.from + #service=smtp: smtp.from
+ is_orig: + - '*.is_orig' + #service=file: file.is_orig + #service=pop3: pop3.is_orig + local_orig: + - '*.local_orig' + #service=conn conn.local_orig + #service=files file.local_orig + method: + - http.request.method + #service=http: http.request.method + #service=sip: sip.method + msg: + - notice.msg + #service=notice: notice.msg + #service=pop3: pop3.msg + name: + - file.name + #- '*.name' + #service=smb_files: file.name + #service=software: software.name + #service=weird: weird.name + path: + - file.path + #- '*.path' + #service=smb_files: file.path + #service=smb_mapping: file.path + #service=smtp: smtp.path + reply_msg: + #- ftp.reply_msg + - '*.reply_msg' + #service=ftp: ftp.reply_msg + #service=radius: radius.reply_msg + reply_to: + #- smtp.reply_to + - '*.reply_to' + #service=sip: sip.reply_to + #service=smtp: smtp.reply_to + response_body_len: + - http.response.body.bytes + #service=http: http.response.body.bytes + #service=sip: sip.response_body_len + request_body_len: + - http.request.body.bytes + #service=http: http.response.body.bytes + #service=sip: sip.request_body_len + service: + #- kerberos.service + - '*.service' + #service=kerberos: kerberos.service + #service=smb_mapping: smb.service + status: + #- socks.status + - '*.status' + #service=pop3: pop3.status + #service=mqtt: mqtt.status + #service=socks: socks.status + status_code: + - 'http.response.status_code' + #service=http: http.response.status_code + #service=sip: sip.status_code + status_msg: + - http.status_msg + #- '*.status_msg' + #service=http: http.status_msg + #service=sip: sip.status_msg + subject: + #- smtp.subject + - '*.subject' + #service=known_certs: known_certs.subject + #service=sip: sip.subject + #service=smtp: smtp.subject + #service=ssl: tls.subject + trans_depth: + #- http.trans_depth + - '*.trans_depth' + #service=http: http.trans_depth + #service=sip: sip.trans_depth + #service=smtp: smtp.trans_depth + version: + #- tls.version + - '*.version' + #service=gquic: 
gquic.version + #service=ntp: ntp.version + #service=socks: socks.version + #service=snmp: snmp.version + #service=ssh: ssh.version + #service=tls: tls.version + # Conn and Conn Long + cache_add_rx_ev: conn.cache_add_rx_ev + cache_add_rx_mpg: conn.cache_add_rx_mpg + cache_add_rx_new: conn.cache_add_rx_new + cache_add_tx_ev: conn.cache_add_tx_ev + cache_add_tx_mpg: conn.cache_add_tx_mpg + cache_del_mpg: conn.cache_del_mpg + cache_entries: conn.cache_entries + conn_state: conn.conn_state + corelight_shunted: conn.corelight_shunted + history: conn.history + id.orig_h.name_src: conn.id.orig_h_name.src + id.orig_h.names_vals: conn.id.orig_h_names.vals + id.resp_h.name_src: conn.id.resp_h_name.src + id.resp_h.name_vals: conn.id.resp_h_name.vals + #local_orig: conn.local_orig + local_resp: conn.local_resp + missed_bytes: conn.missed_bytes + orig_bytes: source.bytes + orig_cc: source.geo.country_iso_code + orig_ip_bytes: conn.orig_ip_bytes + orig_l2_addr: source.mac + orig_pkts: source.packets + resp_bytes: destination.bytes + resp_cc: destination.geo.country_iso_code + resp_ip_bytes: conn.resp.ip_bytes + resp_l2_addr: destination.mac + resp_pkts: destination.packets + # DCE-RPC Specific + endpoint: dce_rpc.endpoint + named_pipe: dce_rpc.named_pipe + operation: dce_rpc.operation + #rtt: dce_rpc.rtt + # DHCP + domain: source.domain + host_name: source.hostname + lease_time: dhcp.lease_time + agent_remote_id: dhcp.agent_remote_id + assigned_addr: dhcp.assigned_addr + circuit_id: dhcp.circuit_id + client_message: dhcp.client_message + client_software: dhcp.client_software + client_fqdn: source.fqdn + #mac: source.mac + msg_orig: dhcp.msg_orig + msg_types: dhcp.msg_types + requested_addr: dhcp.requested_addr + server_addr: destination.ip + server_message: dhcp.server_message + server_software: dhcp.server_software + subscriber_id: dhcp.subscriber_id + # DNS + AA: dns.AA + #addl: dns.addl + auth: dns.auth + answers: dns.answers.name + TTLs: dns.answers.ttl + RA: dns.RA + RD: 
dns.RD + rejected: dns.rejected + TC: dns.TC + Z: dns.Z + qclass: dns.qclass + qclass_name: dns.question.class + qtype: dns.qtype + qtype_name: dns.question.type + query: dns.question.name + rcode_name: dns.response_code + rcode: dns.rcode + #rtt: dns.rtt + trans_id: dns.id + # DNP3 + fc_reply: dnp3.fc_reply + fc_request: dnp3.fc_request + iin: dnp3.inn + # DPD + #analyzer: dpd.analyzer + failure_reason: dpd.failure_reason + packet_segment: dpd.packet_segment + # Files + rx_hosts: source.ip + tx_hosts: destination.ip + #analyzer: files.analyzer + depth: files.depth + #duration: files.duration + extracted: files.extracted + extracted_cutoff: files.extracted_cutoff + extracted_size: files.extracted_size + entropy: files.entropy + md5: file.hash.md5 + sha1: file.hash.sha1 + sha256: file.hash.sha256 + #is_orig: file.is_orig + #local_orig: files.local_orig + missing_bytes: files.missing_bytes + filename: file.name + overflow_bytes: files.overflow_bytes + seen_bytes: files.seen_bytes + source: network.protocol + total_bytes: file.size + timedout: files.timedout + # GQUIC/QUIC + cyu: gquic.cyu + cyutags: gquic.cyutags + #server_name: destination.domain + tag_count: gquic.tag_count + #user_agent: user_agent.original + #version: gquic.version + # FTP + #arg: ftp.arg + #command: ftp.command + cwd: ftp.cwd + data_channel.orig_h: ftp.data_channel.orig_h + data_channel.passive: ftp.data_channel.passive + data_channel.resp_h: ftp.data_channel.resp_h + data_channel.resp_p: ftp.data_channel.resp_p + passive: ftp.passive + file_size: file.size + #mime_type: file.mime_type + #password: ftp.password + reply_code: ftp.reply_code + #reply_msg: ftp.reply_msg + #user: source.user.name + # HTTP + client_header_names: http.client_header_names + cookie_vars: http.cookie_vars + flash_version: http.flash_version + info_code: http.info_code + info_msg: http.info_msg + #method: http.request.method + omniture: http.omniture + orig_filenames: http.orig_filenames + orig_mime_types: 
http.orig_mime_types + origin: http.origin + #password: source.user.password + #response_body_len: http.response.body.bytes + #request_body_len: http.request.body.bytes + referrer: http.request.referrer + post_body: http.post_body + proxied: http.proxied + resp_filenames: http.resp_filenames + resp_mime_types: http.resp_mime_types + server_header_names: http.server_header_names + #status_code: http.response.status_code + #status_msg: http.status_msg + #trans_depth: http.trans_depth + uri_vars: http.uri_vars + #user_agent: user_agent.original + #username: source.user.name + # Intel + file_mime_type: file.mime_type + file_desc: intel.file_desc + #host: host.ip + matched: intel.matched + indicator: intel.seen.indicator + indicator_type: intel.seen.indicator_type + node: intel.seen.node + where: intel.seen.where + sources: intel.seen.sources + # IRC + dcc_file_name: file.name + dcc_file_size: file.size + dcc_mime_type: file.mime_type + #command: irc.command + nick: irc.nick + #user: source.user.name + value: irc.command + # Kerberos + auth_ticket: kerberos.auth_ticket + #cipher: kerberos.cipher + #client: kerberos.client + client_cert_subject: kerberos.client_cert_subject + error_code: kerberos.error_code + error_msg: kerberos.error_msg + #from: kerberos.from + forwardable: kerberos.forwardable + new_ticket: kerberos.new_ticket + renewable: kerberos.renewable + request_type: kerberos.request_type + server_cert_subject: kerberos.server_cert_subject + #service: kerberos.service + #success: event.outcome + till: kerberos.till + # Known_Certs + #host: host.ip + issuer_subject: known_certs.issuer_subject + #port_num: labels.known.port + serial: known_certs.serial + #subject: known_certs.subject + # Known_Modbus + #host: host.ip + device_type: known_modbus.device_type + # Known_Services + port_proto: network.transport + #port_num: labels.known.port + # Modbus All + delta: modbus.delta + new_val: modbus.new_val + old_val: modbus.old_val + register: modbus.register + # Modbus 
+ func: modbus.func + exception: modbus.exception + track_address: modbus.track_address + # ModBus_Register_Change + #delta: modbus.delta + #new_val: modbus.new_val + #old_val: modbus.old_val + #register: modbus.register + # MQTT_Connect , MQTT_Publish, MQTT_Subscribe + ack: mqtt.ack + #action: mqtt.action + client_id: mqtt.client_id + connect_status: mqtt.connect_status + from_client: mqtt.from_client + granted_qos_level: mqtt.granted_qos_level + payload: mqtt.payload + payload_len: mqtt.payload_len + proto_name: mqtt.proto_name + proto_version: mqtt.proto_version + qos: mqtt.qos + qos_levels: mqtt.qos_levels + retain: mqtt.retain + #status: mqtt.status + topic: mqtt.topic + topics: mqtt.topics + will_payload: mqtt.will_payload + will_topic: mqtt.will_topic + # MYSQL + #arg: mysql.arg + cmd: mysql.command + response: mysql.response + rows: mysql.rows + #success: event.outcome + # Notice + actions: notice.actions + dropped: notice.dropped + #dst: destination.ip + email_body_sections: notice.email_body_sections + email_delay_tokens: notice.email_delay_tokens + identifier: notice.identifier + #msg: notice.msg + n: notice.n + note: notice.note + p: destination.port + peer_descr: notice.peer_descr + peer_name: notice.peer_name + #proto: network.transport + #src: source.ip + sub: notice.sub + subpress_for: notice.subpress_for + # NTLM + domainname: ntlm.domainname + hostname: ntlm.hostname + #username: source.user.name + server_nb_computer_name: ntlm.server_nb_computer_name + server_tree_name: ntlm.server_tree_name + #success: event.outcome + server_dns_computer_name: ntlm.server_dns_computer_name + # NTP + mode: ntp.mode + num_exts: ntp.num_exts + org_time: ntp.org_time + poll: ntp.poll + precision: ntp.precision + rec_time: ntp.rec_time + ref_id: ntp.ref_id + ref_time: ntp.ref_time + root_delay: ntp.root_delay + root_disp: ntp.root_disp + stratum: ntp.stratum + #version: ntp.version + xmt_time: ntp.xmt_time + # OCSP + certStatus: oscp.certStatus + hashAlgorithm: 
oscp.hashAlgorithm + issuerKeyHash: oscp.issuerKeyHash + issuerNameHash: oscp.issuerNameHash + nextUpdate: oscp.nextUpdate + revokereason: oscp.revokereason + revoketime: oscp.revoketime + serialNumber: oscp.serialNumber + thisUpdate: oscp.thisUpdate + # PE + compile_ts: pe.compile_ts + has_cert_table: pe.has_cert_table + has_debug_data: pe.has_debug_data + has_import_table: pe.has_import_table + has_export_table: pe.has_export_table + is_64bit: pe.is_64bit + is_exe: pe.is_exe + machine: pe.machine + os: pe.os + section_names: pe.section_names + subsystem: pe.subsystem + uses_aslr: pe.uses_aslr + uses_code_integrity: pe.uses_code_integrity + uses_dep: pe.uses_dep + uses_seh: pe.uses_seh + # POP3 + #arg: pop3.arg + #command: pop3.command + current_request: pop3.current_request + current_response: pop3.current_response + data: pop3.data + failed_commands: pop3.failed_commands + has_client_activity: pop3.has_client_activity + #is_orig: pop3.is_orig + #msg: pop3.msg + #password: source.user.password + pending: pop3.pending + #status: pop3.status + successful_commands: pop3.successful_commands + #username: source.user.name + # Radius + connect_info: radius.connect_info + framed_addr: radius.framed_addr + #mac: source.mac + #reply_msg: radius.reply_msg + #result: event.outcome + ttl: event.duration + tunnel_client: radius.tunnel_client + #username: source.user.name + # RDP + cert_count: rdp.cert_count + cert_permanent: rdp.cert_permanent + cert_type: rdp.cert_type + client_build: rdp.client_build + client_dig_product_id: rdp.client_dig_product_id + client_name: source.hostname + cookie: rdp.cookie + desktop_height: rdp.desktop_height + desktop_width: rdp.desktop_width + encryption_level: rdp.encryption_level + encryption_method: rdp.encryption_method + keyboard_layout: rdp.keyboard_layout + requested_color_depth: rdp.requested_color_depth + #result: event.outcome + security_protocol: rdp.security_protocol + ssl: rdp.ssl + # RFB + #auth: event.outcome + 
authentication_method: rfb.authentication_method + client_major_version: rfb.client_major_version + client_minor_version: rfb.client_minor_version + desktop_name: destination.hostname + height: rfb.height + server_major_version: rfb.server_major_version + server_minor_version: rfb.server_minor_version + share_flag: rfb.share_flag + width: rfb.width + # SIP + call_id: sip.call_id + content_type: sip.content_type + #date: sip.date + #method: sip.method + #reply_to: sip.reply_to + #request_body_len: sip.request_body_len + request_from: sip.request_from + request_path: sip.request_path + request_to: sip.request_to + #response_body_len: sip.response_body_len + response_from: sip.response_from + response_path: sip.response_path + response_to: sip.response_to + seq: sip.seq + #status_code: sip.status_code + #status_msg: sip.status_msg + #subject: sip.subject + #trans_depth: sip.trans_depth + #uri: url.original + warning: sip.warning + #user_agent: user_agent.original + # SMB_Files + #action: smb.action + #name: file.name + #path: file.path + prev_name: smb.prev_name + size: file.size + times_accessed: file.accessed + times_changed: file.ctime + times_created: file.created + times_modified: file.mtime + # SMB_Mapping + native_file_system: smb.native_file_system + #path: file.path + share_type: smb.share_type + #service: smb.service + # SMTP + cc: smtp.cc + #date: smtp.date + first_received: smtp.first_received + #from: smtp.from + helo: smtp.helo + in_reply_to: smtp.in_reply_to + is_webmail: smtp.is_webmail + last_reply: smtp.last_reply + mailfrom: smtp.mailfrom + msg_id: smtp.msg_id + #path: smtp.path + rcptto: smtp.rcptto + #reply_to: smtp.reply_to + second_received: smtp.second_received + #subject: smtp.subject + tls: smtp.tls + to: smtp.to + #trans_depth: smtp.trans_depth + x_originating_ip: smtp.x_originating_ip + #user_agent: user_agent.original + # SMTP_Links + #cs-host: url.domain + #c-uri: url.original + # SNMP + #duration: event.duration + community: 
snmp.community + display_string: snmp.display_string + get_bulk_requests: snmp.get_bulk_requests + get_requests: snmp.get_requests + set_requests: snmp.set_requests + up_since: snmp.up_since + #version: snmp.version + # Socks + #password: source.user.password + bound_host: socks.bound_host + bound_name: socks.bound_name + bound_p: socks.bound_p + request_host: socks.request_host + request_name: socks.request_name + request_p: socks.request_p + #status: socks.status + #version: socks.version + # Software + #host: host.ip + host_p: software.host_port + version.major: software.version.major + version.minor: software.version.minor + version.minor2: software.version.minor2 + version.minor3: software.version.minor3 + #name: software.name + unparsed_version: software.unparsed_version + software_type: software.software_type + #url: url.original + # SSH + auth_attempts: ssh.auth_attempts + auth_success: event.outcome + cipher_alg: ssh.cipher_alg + #client: ssh.client + compression_alg: ssh.compression_alg + cshka: ssh.cshka + direction: network.direction + hassh: ssh.hassh + hasshAlgorithms: ssh.hasshAlgorithms + hasshServer: ssh.hasshServer + hasshServerAlgorithms: ssh.hasshServerAlgorithms + hasshVersion: ssh.hasshVersion + host_key: ssh.host_key + host_key_alg: ssh.host_key_alg + kex_alg: ssh.kex_alg + mac_alg: ssh.mac_alg + server: ssh.server + #version: ssh.version + # SSL / TLS + #cipher: tls.cipher + client_issuer: tls.client.issuer + client_subject: tls.client.subject + curve: tls.curve + established: tls.established + issuer: tls.server.issuer + ja3: tls.client.ja3 + ja3s: tls.client.ja3s + last_alert: ssl.last_alert + next_protocol: tls.next_protocol + notary: ssl.notary + ocsp_status: ssl.oscp_status + orig_certificate_sha1: tls.client.hash.sha1 + resp_certificate_sha1: tls.server.hash.sha1 + resumed: tls.resumed + #server_name: tls.client.server_name + #subject: tls.server.subject + valid_ct_logs: ssl.valid_ct_logs + valid_ct_operators: ssl.validct_operators + 
valid_ct_operators_list: ssl.valid_ct_operators_list + validation_status: ssl.validation_status + #version: tls.version + version_num: ssl.version_num + # Syslog + facility: log.syslog.facility.name + severity: log.syslog.severity.name + message: syslog.message + # Traceroute + #proto: network.transport + #dst: destination.ip + #src: source.ip + # Tunnel + #action: tunnel.action + tunnel_type: tunnel.tunnel_type + # Weird + #addl: weird.addl + #name: weird.name + notice: weird.notice + peer: weird.peer + # X509 + basic_constraints.ca: x509.certificate.basic_constraints_ca + basic_constraints.path_len: x509.certificate.basic_constraints_path_length + certificate.cn: x509.certificate.cn + certificate.curve: x509.certificate.curve + certificate.exponent: x509.certificate.exponent + certificate.issuer: x509.certificate.issuer + certificate.key_alg: x509.certificate.key_alg + certificate.key_length: x509.certificate.key_length + certificate.key_type: x509.certificate.key_type + certificate.not_valid_after: x509.certificate.not_valid_after + certificate.not_valid_before: x509.certificate.not_valid_before + certificate.serial: x509.certificate.serial + certificate.sig_alg: x509.certificate.sig_alg + certificate.subject: x509.certificate.subject + certificate.version: x509.certificate.version + logcert: x509.logcert + san.dns: x509.san.dns + san.email: x509.san.email + san.ip: x509.san.ip + san.uri: x509.san.url + # Temporary one off rule name fields + cs-uri: url.original + # destination.domain: + # destination.ip: + # destination.port: + # http.response.status_code + # http.request.body.content + # source.domain: + # source.ip: + # source.port: + agent.version: http.version + c-ip: source.ip + clientip: source.ip + clientIP: source.ip + dest_domain: + - destination.domain + - url.domain + dest_ip: destination.ip + dest_port: destination.port + #TODO:WhatShouldThisBe?==dest: + #TODO:WhatShouldThisBe?==destination: + #TODO:WhatShouldThisBe?==Destination: + 
destination.hostname: + - destination.domain + - url.domain + DestinationAddress: + DestinationHostname: + - destination.domain + - url.domain + DestinationIp: destination.ip + DestinationIP: destination.ip + DestinationPort: destination.port + dst-ip: destination.ip + dstip: destination.ip + dstport: destination.port + Host: + - destination.domain + - url.domain + #host: + # - destination.domain + # - url.domain + HostVersion: http.version + http_host: + - destination.domain + - url.domain + http_uri: url.original + http_url: url.original + http_user_agent: user_agent.original + http.request.url-query-params: url.original + HttpMethod: http.request.method + in_url: url.original + #parent_domain: + # - url.registered_domain + # - destination.registered_domain + post_url_parameter: url.original + Request Url: url.original + request_url: url.original + request_URL: url.original + RequestUrl: url.original + #response: http.response.status_code + resource.url: url.original + resource.URL: url.original + sc_status: http.response.status_code + sender_domain: + - destination.domain + - url.domain + service.response_code: http.response.status_code + SourceAddr: + - source.address + - source.ip + SourceAddress: source.ip + SourceIP: source.ip + SourceIp: source.ip + SourceNetworkAddress: + - source.address + - source.ip + SourcePort: source.port + srcip: source.ip + Status: http.response.status_code + #status: http.response.status_code + url: url.original + URL: url.original + url_query: url.original + url.query: url.original + uri_path: url.original + #user_agent: user_agent.original + user_agent.name: user_agent.original + user-agent: user_agent.original + User-Agent: user_agent.original + useragent: user_agent.original + UserAgent: user_agent.original + User Agent: user_agent.original + web_dest: + - url.domain + - destination.domain + web.dest: + - url.domain + - destination.domain + Web.dest: + - url.domain + - destination.domain + web.host: + - url.domain + - 
destination.domain + Web.host: + - url.domain + - destination.domain + web_method: http.request.method + Web_method: http.request.method + web.method: http.request.method + Web.method: http.request.method + web_src: source.ip + web_status: http.response.status_code + Web_status: http.response.status_code + web.status: http.response.status_code + Web.status: http.response.status_code + web_uri: url.original + web_url: url.original \ No newline at end of file diff --git a/tools/config/elk-defaultindex-filebeat.yml b/tools/config/elk-defaultindex-filebeat.yml new file mode 100644 index 000000000..24f52574d --- /dev/null +++ b/tools/config/elk-defaultindex-filebeat.yml @@ -0,0 +1,2 @@ +defaultindex: + - filebeat-* diff --git a/tools/config/elk-defaultindex-logstash.yml b/tools/config/elk-defaultindex-logstash.yml new file mode 100644 index 000000000..7c8261991 --- /dev/null +++ b/tools/config/elk-defaultindex-logstash.yml @@ -0,0 +1,2 @@ +defaultindex: + - logstash-* diff --git a/tools/config/elk-defaultindex.yml b/tools/config/elk-defaultindex.yml new file mode 100644 index 000000000..99a94b8fd --- /dev/null +++ b/tools/config/elk-defaultindex.yml @@ -0,0 +1,3 @@ +defaultindex: + - logstash-* + - filebeat-* diff --git a/tools/config/elk-linux.yml b/tools/config/elk-linux.yml new file mode 100644 index 000000000..9b2d48083 --- /dev/null +++ b/tools/config/elk-linux.yml @@ -0,0 +1,15 @@ +logsources: + apache: + category: webserver + index: logstash-apache-* + webapp-error: + category: application + index: logstash-apache_error-* + linux-auth: + product: linux + service: auth + index: logstash-auth-* +fieldmappings: + client_ip: clientip + url: request +defaultindex: logstash-* diff --git a/tools/config/elk-windows.yml b/tools/config/elk-windows.yml new file mode 100644 index 000000000..a408123c8 --- /dev/null +++ b/tools/config/elk-windows.yml @@ -0,0 +1,30 @@ +logsources: + windows: + product: windows + index: logstash-windows-* + windows-application: + product: 
windows + service: application + conditions: + EventLog: Application + windows-security: + product: windows + service: security + conditions: + EventLog: Security + windows-sysmon: + product: windows + service: sysmon + conditions: + EventLog: Microsoft-Windows-Sysmon + windows-dns-server: + product: windows + service: dns-server + conditions: + EventLog: 'DNS Server' + windows-driver-framework: + product: windows + service: driver-framework + conditions: + source: 'Microsoft-Windows-DriverFrameworks-UserMode/Operational' +defaultindex: logstash-* diff --git a/tools/config/elk-winlogbeat-sp.yml b/tools/config/elk-winlogbeat-sp.yml new file mode 100644 index 000000000..f1abce0a2 --- /dev/null +++ b/tools/config/elk-winlogbeat-sp.yml @@ -0,0 +1,95 @@ +logsources: + windows: + product: windows + index: + + windows-application: + product: windows + service: application + conditions: + log_name: Application + windows-security: + product: windows + service: security + conditions: + log_name: Security + windows-sysmon: + product: windows + service: sysmon + conditions: + log_name: 'Microsoft-Windows-Sysmon/Operational' + windows-dns-server: + product: windows + service: dns-server + conditions: + log_name: 'DNS Server' + windows-driver-framework: + product: windows + service: driver-framework + conditions: + source: 'Microsoft-Windows-DriverFrameworks-UserMode/Operational' +defaultindex: +# Extract all field names with yq: +# yq -r '.detection | del(.condition) | map(keys) | .[][]' $(find sigma/rules/windows -name '*.yml') | sort -u | grep -v ^EventID$ | sed 's/^\(.*\)/ \1: event_data.\1/g' +# Keep EventID! Clean up the list afterwards!
+fieldmappings: + EventID: event_id + AccessMask: event_data.AccessMask + AccountName: event_data.AccountName + AllowedToDelegateTo: event_data.AllowedToDelegateTo + AttributeLDAPDisplayName: event_data.AttributeLDAPDisplayName + AuditPolicyChanges: event_data.AuditPolicyChanges + AuthenticationPackageName: event_data.AuthenticationPackageName + CallingProcessName: event_data.CallingProcessName + CallTrace: event_data.CallTrace + CommandLine: event_data.CommandLine + ComputerName: event_data.ComputerName + CurrentDirectory: event_data.CurrentDirectory + Description: event_data.Description + DestinationHostname: event_data.DestinationHostname + DestinationIp: event_data.DestinationIp + DestinationIsIpv6: event_data.DestinationIsIpv6 + DestinationPort: event_data.DestinationPort + Details: event_data.Details + EngineVersion: event_data.EngineVersion + EventType: event_data.EventType + FailureCode: event_data.FailureCode + FileName: event_data.FileName + GrantedAccess: event_data.GrantedAccess + GroupName: event_data.GroupName + Hashes: event_data.Hashes + HiveName: event_data.HiveName + HostVersion: event_data.HostVersion + Image: event_data.Image + ImageLoaded: event_data.ImageLoaded + ImagePath: event_data.ImagePath + Imphash: event_data.Imphash + LogonProcessName: event_data.LogonProcessName + LogonType: event_data.LogonType + NewProcessName: event_data.NewProcessName + ObjectClass: event_data.ObjectClass + ObjectName: event_data.ObjectName + ObjectType: event_data.ObjectType + ObjectValueName: event_data.ObjectValueName + ParentCommandLine: event_data.ParentCommandLine + ParentImage: event_data.ParentImage + Path: event_data.Path + PipeName: event_data.PipeName + ProcessName: event_data.ProcessName + Properties: event_data.Properties + ServiceFileName: event_data.ServiceFileName + ServiceName: event_data.ServiceName + ShareName: event_data.ShareName + Signature: event_data.Signature + Source: event_data.Source + SourceImage: event_data.SourceImage + StartModule: 
event_data.StartModule + Status: event_data.Status + SubjectUserName: event_data.SubjectUserName + TargetFilename: event_data.TargetFilename + TargetImage: event_data.TargetImage + TargetObject: event_data.TargetObject + TicketEncryptionType: event_data.TicketEncryptionType + TicketOptions: event_data.TicketOptions + User: event_data.User + WorkstationName: event_data.WorkstationName diff --git a/tools/config/elk-winlogbeat.yml b/tools/config/elk-winlogbeat.yml new file mode 100644 index 000000000..20bf500fc --- /dev/null +++ b/tools/config/elk-winlogbeat.yml @@ -0,0 +1,94 @@ +logsources: + windows: + product: windows + index: winlogbeat-* + windows-application: + product: windows + service: application + conditions: + log_name: Application + windows-security: + product: windows + service: security + conditions: + log_name: Security + windows-sysmon: + product: windows + service: sysmon + conditions: + log_name: 'Microsoft-Windows-Sysmon/Operational' + windows-dns-server: + product: windows + service: dns-server + conditions: + log_name: 'DNS Server' + windows-driver-framework: + product: windows + service: driver-framework + conditions: + source: 'Microsoft-Windows-DriverFrameworks-UserMode/Operational' +defaultindex: winlogbeat-* +# Extract all field names with yq: +# yq -r '.detection | del(.condition) | map(keys) | .[][]' $(find sigma/rules/windows -name '*.yml') | sort -u | grep -v ^EventID$ | sed 's/^\(.*\)/ \1: event_data.\1/g' +# Keep EventID! Clean up the list afterwards! 
+fieldmappings: + EventID: event_id + AccessMask: event_data.AccessMask + AccountName: event_data.AccountName + AllowedToDelegateTo: event_data.AllowedToDelegateTo + AttributeLDAPDisplayName: event_data.AttributeLDAPDisplayName + AuditPolicyChanges: event_data.AuditPolicyChanges + AuthenticationPackageName: event_data.AuthenticationPackageName + CallingProcessName: event_data.CallingProcessName + CallTrace: event_data.CallTrace + CommandLine: event_data.CommandLine + ComputerName: event_data.ComputerName + CurrentDirectory: event_data.CurrentDirectory + Description: event_data.Description + DestinationHostname: event_data.DestinationHostname + DestinationIp: event_data.DestinationIp + DestinationIsIpv6: event_data.DestinationIsIpv6 + DestinationPort: event_data.DestinationPort + Details: event_data.Details + EngineVersion: event_data.EngineVersion + EventType: event_data.EventType + FailureCode: event_data.FailureCode + FileName: event_data.FileName + GrantedAccess: event_data.GrantedAccess + GroupName: event_data.GroupName + Hashes: event_data.Hashes + HiveName: event_data.HiveName + HostVersion: event_data.HostVersion + Image: event_data.Image + ImageLoaded: event_data.ImageLoaded + ImagePath: event_data.ImagePath + Imphash: event_data.Imphash + LogonProcessName: event_data.LogonProcessName + LogonType: event_data.LogonType + NewProcessName: event_data.NewProcessName + ObjectClass: event_data.ObjectClass + ObjectName: event_data.ObjectName + ObjectType: event_data.ObjectType + ObjectValueName: event_data.ObjectValueName + ParentCommandLine: event_data.ParentCommandLine + ParentImage: event_data.ParentImage + Path: event_data.Path + PipeName: event_data.PipeName + ProcessName: event_data.ProcessName + Properties: event_data.Properties + ServiceFileName: event_data.ServiceFileName + ServiceName: event_data.ServiceName + ShareName: event_data.ShareName + Signature: event_data.Signature + Source: event_data.Source + SourceImage: event_data.SourceImage + StartModule: 
event_data.StartModule + Status: event_data.Status + SubjectUserName: event_data.SubjectUserName + TargetFilename: event_data.TargetFilename + TargetImage: event_data.TargetImage + TargetObject: event_data.TargetObject + TicketEncryptionType: event_data.TicketEncryptionType + TicketOptions: event_data.TicketOptions + User: event_data.User + WorkstationName: event_data.WorkstationName diff --git a/tools/config/filebeat-zeek-ecs.yml b/tools/config/filebeat-zeek-ecs.yml new file mode 100644 index 000000000..9000db4f0 --- /dev/null +++ b/tools/config/filebeat-zeek-ecs.yml @@ -0,0 +1,468 @@ +title: Zeek field mappings for default collection of JSON logs with no parsing/normalization done and sending into logstash-*index +order: 20 +backends: + - es-qs + - es-dsl + - elasticsearch-rule + - kibana + - xpack-watcher + - elastalert + - elastalert-dsl +logsources: + zeek: + product: zeek + index: 'logstash*' + zeek-category-accounting: + category: accounting + rewrite: + product: zeek + service: syslog + zeek-category-firewall: + category: firewall + conditions: + '@stream': conn + zeek-category-dns: + category: dns + conditions: + '@stream': dns + zeek-category-proxy: + category: proxy + rewrite: + product: zeek + service: http + zeek-category-webserver: + category: webserver + conditions: + '@stream': http + rewrite: + product: zeek + service: http + zeek-conn: + product: zeek + service: conn + conditions: + '@stream': conn + zeek-conn_long: + product: zeek + service: conn_long + conditions: + '@stream': conn_long + zeek-dce_rpc: + product: zeek + service: dce_rpc + conditions: + '@stream': dce_rpc + zeek-dns: + product: zeek + service: dns + conditions: + '@stream': dns + zeek-dnp3: + product: zeek + service: dnp3 + conditions: + '@stream': dnp3 + zeek-dpd: + product: zeek + service: dpd + conditions: + '@stream': dpd + zeek-files: + product: zeek + service: files + conditions: + '@stream': files + zeek-ftp: + product: zeek + service: ftp + conditions: + '@stream': ftp + 
zeek-gquic: + product: zeek + service: gquic + conditions: + '@stream': gquic + zeek-http: + product: zeek + service: http + conditions: + '@stream': http + zeek-http2: + product: zeek + service: http2 + conditions: + '@stream': http2 + zeek-intel: + product: zeek + service: intel + conditions: + '@stream': intel + zeek-irc: + product: zeek + service: irc + conditions: + '@stream': irc + zeek-kerberos: + product: zeek + service: kerberos + conditions: + '@stream': kerberos + zeek-known_certs: + product: zeek + service: known_certs + conditions: + '@stream': known_certs + zeek-known_hosts: + product: zeek + service: known_hosts + conditions: + '@stream': known_hosts + zeek-known_modbus: + product: zeek + service: known_modbus + conditions: + '@stream': known_modbus + zeek-known_services: + product: zeek + service: known_services + conditions: + '@stream': known_services + zeek-modbus: + product: zeek + service: modbus + conditions: + '@stream': modbus + zeek-modbus_register_change: + product: zeek + service: modbus_register_change + conditions: + '@stream': modbus_register_change + zeek-mqtt_connect: + product: zeek + service: mqtt_connect + conditions: + '@stream': mqtt_connect + zeek-mqtt_publish: + product: zeek + service: mqtt_publish + conditions: + '@stream': mqtt_publish + zeek-mqtt_subscribe: + product: zeek + service: mqtt_subscribe + conditions: + '@stream': mqtt_subscribe + zeek-mysql: + product: zeek + service: mysql + conditions: + '@stream': mysql + zeek-notice: + product: zeek + service: notice + conditions: + '@stream': notice + zeek-ntlm: + product: zeek + service: ntlm + conditions: + '@stream': ntlm + zeek-ntp: + product: zeek + service: ntp + conditions: + '@stream': ntp + zeek-ocsp: + product: zeek + service: ocsp + conditions: + '@stream': ocsp + zeek-pe: + product: zeek + service: pe + conditions: + '@stream': pe + zeek-pop3: + product: zeek + service: pop3 + conditions: + '@stream': pop3 + zeek-radius: + product: zeek + service: radius + 
conditions: + '@stream': radius + zeek-rdp: + product: zeek + service: rdp + conditions: + '@stream': rdp + zeek-rfb: + product: zeek + service: rfb + conditions: + '@stream': rfb + zeek-sip: + product: zeek + service: sip + conditions: + '@stream': sip + zeek-smb_files: + product: zeek + service: smb_files + conditions: + '@stream': smb_files + zeek-smb_mapping: + product: zeek + service: smb_mapping + conditions: + '@stream': smb_mapping + zeek-smtp: + product: zeek + service: smtp + conditions: + '@stream': smtp + zeek-smtp_links: + product: zeek + service: smtp_links + conditions: + '@stream': smtp_links + zeek-snmp: + product: zeek + service: snmp + conditions: + '@stream': snmp + zeek-socks: + product: zeek + service: socks + conditions: + '@stream': socks + zeek-software: + product: zeek + service: software + conditions: + '@stream': software + zeek-ssh: + product: zeek + service: ssh + conditions: + '@stream': ssh + zeek-ssl: + product: zeek + service: ssl + conditions: + '@stream': ssl + zeek-tls: # In case people call it TLS even though orig log is called ssl + product: zeek + service: tls + conditions: + '@stream': ssl + zeek-syslog: + product: zeek + service: syslog + conditions: + '@stream': syslog + zeek-tunnel: + product: zeek + service: tunnel + conditions: + '@stream': tunnel + zeek-traceroute: + product: zeek + service: traceroute + conditions: + '@stream': traceroute + zeek-weird: + product: zeek + service: weird + conditions: + '@stream': weird + zeek-x509: + product: zeek + service: x509 + conditions: + '@stream': x509 + zeek-ip_search: + product: zeek + service: network + conditions: + '@stream': + - conn + - conn_long + - dce_rpc + - dhcp + - dnp3 + - dns + - ftp + - gquic + - http + - irc + - kerberos + - modbus + - mqtt_connect + - mqtt_publish + - mqtt_subscribe + - mysql + - ntlm + - ntp + - radius + - rfb + - sip + - smb_files + - smb_mapping + - smtp + - smtp_links + - snmp + - socks + - ssh + - tls #SSL + - tunnel + - weird 
+defaultindex: 'logstash-*' +fieldmappings: + # All Logs Applied Mapping & Taxonomy + dst_ip: id.resp_h + dst_port: id.resp_p + network_protocol: proto + src_ip: id.orig_h + src_port: id.orig_p + # DNS matching Taxonomy & DNS Category + answer: answers + #question_length: # Does not exist in open source version + record_type: qtype_name + #parent_domain: # Does not exist in open source version + # HTTP matching Taxonomy & Web/Proxy Category + cs-bytes: request_body_len + cs-cookie: cookie + r-dns: host + sc-bytes: response_body_len + sc-status: status_code + c-uri: uri + c-uri-extension: uri + c-uri-query: uri + c-uri-stem: uri + c-useragent: user_agent + cs-host: host + cs-method: method + cs-referrer: referrer + cs-version: version + # Temporary one off rule name fields + agent.version: version + c-cookie: cookie + c-ip: id.orig_h + cs-uri: uri + clientip: id.orig_h + clientIP: id.orig_h + dest_domain: + - query + - host + - server_name + dest_ip: id.resp_h + dest_port: id.resp_p + #TODO:WhatShouldThisBe?==dest: + #TODO:WhatShouldThisBe?==destination: + #TODO:WhatShouldThisBe?==Destination: + destination.hostname: + - query + - host + - server_name + DestinationAddress: + DestinationHostname: + - host + - query + - server_name + DestinationIp: id.resp_h + DestinationIP: id.resp_h + DestinationPort: id.resp_p + dst-ip: id.resp_h + dstip: id.resp_h + dstport: id.resp_p + Host: + - host + - query + - server_name + HostVersion: http.version + http_host: + - host + - query + - server_name + http_uri: uri + http_url: uri + http_user_agent: user_agent + http.request.url-query-params: uri + HttpMethod: method + in_url: uri + # parent_domain: # Not in open source zeek + post_url_parameter: uri + Request Url: uri + request_url: uri + request_URL: uri + RequestUrl: uri + #response: status_code + resource.url: uri + resource.URL: uri + sc_status: status_code + sender_domain: + - query + - server_name + service.response_code: status_code + source: id.orig_h + SourceAddr: 
id.orig_h + SourceAddress: id.orig_h + SourceIP: id.orig_h + SourceIp: id.orig_h + SourceNetworkAddress: id.orig_h + SourcePort: id.orig_p + srcip: id.orig_h + Status: status_code + status: status_code + url: uri + URL: uri + url_query: uri + url.query: uri + uri_path: uri + user_agent: user_agent + user_agent.name: user_agent + user-agent: user_agent + User-Agent: user_agent + useragent: user_agent + UserAgent: user_agent + User Agent: user_agent + web_dest: + - host + - query + - server_name + web.dest: + - host + - query + - server_name + Web.dest: + - host + - query + - server_name + web.host: + - host + - query + - server_name + Web.host: + - host + - query + - server_name + web_method: method + Web_method: method + web.method: method + Web.method: method + web_src: id.orig_h + web_status: status_code + Web_status: status_code + web.status: status_code + Web.status: status_code + web_uri: uri + web_url: uri + # Most are in ECS, but for things not using Elastic - these need renamed + destination.ip: id.resp_h + destination.port: id.resp_p + http.request.body.content: post_body + #source.domain: + source.ip: id.orig_h + source.port: id.orig_p \ No newline at end of file diff --git a/tools/config/humio.yml b/tools/config/humio.yml new file mode 100644 index 000000000..a25df0158 --- /dev/null +++ b/tools/config/humio.yml @@ -0,0 +1,97 @@ +title: Humio log source conditions +order: 20 +backends: + - humio + +fieldmappings: + EventID: winlog.event_id + Event_ID: winlog.event_id + eventId: winlog.event_id + event_id: winlog.event_id + event-id: winlog.event_id + eventid: winlog.event_id + AccessMask: winlog.event_data.AccessMask + AccountName: winlog.event_data.AccountName + AllowedToDelegateTo: winlog.event_data.AllowedToDelegateTo + AttributeLDAPDisplayName: winlog.event_data.AttributeLDAPDisplayName + AuditPolicyChanges: winlog.event_data.AuditPolicyChanges + AuthenticationPackageName: winlog.event_data.AuthenticationPackageName + CallingProcessName: 
winlog.event_data.CallingProcessName + CallTrace: winlog.event_data.CallTrace + Channel: winlog.channel + CommandLine: winlog.event_data.CommandLine + ComputerName: winlog.ComputerName + CurrentDirectory: winlog.event_data.CurrentDirectory + Description: winlog.event_data.Description + DestinationHostname: winlog.event_data.DestinationHostname + DestinationIp: winlog.event_data.DestinationIp + dst_ip: winlog.event_data.DestinationIp + DestinationIsIpv6: winlog.event_data.DestinationIsIpv6 + DestinationPort: winlog.event_data.DestinationPort + dst_port: winlog.event_data.DestinationPort + Details: winlog.event_data.Details + EngineVersion: winlog.event_data.EngineVersion + EventType: winlog.event_data.EventType + FailureCode: winlog.event_data.FailureCode + FileName: winlog.event_data.FileName + GrantedAccess: winlog.event_data.GrantedAccess + GroupName: winlog.event_data.GroupName + GroupSid: winlog.event_data.GroupSid + Hashes: winlog.event_data.Hashes + HiveName: winlog.event_data.HiveName + HostVersion: winlog.event_data.HostVersion + Image: winlog.event_data.Image + ImageLoaded: winlog.event_data.ImageLoaded + ImagePath: winlog.event_data.ImagePath + Imphash: winlog.event_data.Imphash + IpAddress: winlog.event_data.IpAddress + KeyLength: winlog.event_data.KeyLength + LogonProcessName: winlog.event_data.LogonProcessName + LogonType: winlog.event_data.LogonType + NewProcessName: winlog.event_data.NewProcessName + ObjectClass: winlog.event_data.ObjectClass + ObjectName: winlog.event_data.ObjectName + ObjectType: winlog.event_data.ObjectType + ObjectValueName: winlog.event_data.ObjectValueName + ParentCommandLine: winlog.event_data.ParentCommandLine + ParentProcessName: winlog.event_data.ParentProcessName + ParentImage: winlog.event_data.ParentImage + Path: winlog.event_data.Path + PipeName: winlog.event_data.PipeName + ProcessCommandLine: winlog.event_data.ProcessCommandLine + ProcessName: winlog.event_data.ProcessName + Properties: winlog.event_data.Properties + 
SecurityID: winlog.event_data.SecurityID + ServiceFileName: winlog.event_data.ServiceFileName + ServiceName: winlog.event_data.ServiceName + ShareName: winlog.event_data.ShareName + Signature: winlog.event_data.Signature + Source: winlog.event_data.Source + SourceImage: winlog.event_data.SourceImage + SourceIp: winlog.event_data.SourceIp + src_ip: winlog.event_data.SourceIp + StartModule: winlog.event_data.StartModule + Status: winlog.event_data.Status + SubjectUserName: winlog.event_data.SubjectUserName + SubjectUserSid: winlog.event_data.SubjectUserSid + TargetFilename: winlog.event_data.TargetFilename + Targetfilename: winlog.event_data.TargetFilename + TargetImage: winlog.event_data.TargetImage + TargetObject: winlog.event_data.TargetObject + TicketEncryptionType: winlog.event_data.TicketEncryptionType + TicketOptions: winlog.event_data.TicketOptions + User: winlog.event_data.User + WorkstationName: winlog.event_data.WorkstationName + # Channel: WLAN-Autoconfig AND EventID: 8001 + AuthenticationAlgorithm: winlog.event_data.AuthenticationAlgorithm + BSSID: winlog.event_data.BSSID + BSSType: winlog.event_data.BSSType + CipherAlgorithm: winlog.event_data.CipherAlgorithm + ConnectionId: winlog.event_data.ConnectionId + ConnectionMode: winlog.event_data.ConnectionMode + InterfaceDescription: winlog.event_data.InterfaceDescription + InterfaceGuid: winlog.event_data.InterfaceGuid + OnexEnabled: winlog.event_data.OnexEnabled + PHYType: winlog.event_data.PHYType + ProfileName: winlog.event_data.ProfileName + SSID: winlog.event_data.SSID diff --git a/tools/config/logstash-zeek-default-json.yml b/tools/config/logstash-zeek-default-json.yml new file mode 100644 index 000000000..8c24f4837 --- /dev/null +++ b/tools/config/logstash-zeek-default-json.yml @@ -0,0 +1,349 @@ +title: Zeek field mappings for default collection of JSON logs with no parsing/normalization done and sending into logstash-*index +order: 20 +backends: + - es-qs + - es-dsl + - elasticsearch-rule + - kibana 
+ - xpack-watcher + - elastalert + - elastalert-dsl +logsources: + zeek: + product: zeek + index: 'logstash*' + zeek-category-accounting: + category: accounting + rewrite: + product: zeek + service: syslog + zeek-category-firewall: + category: firewall + conditions: + '@stream': conn + zeek-category-dns: + category: dns + conditions: + '@stream': dns + zeek-category-proxy: + category: proxy + rewrite: + product: zeek + service: http + zeek-category-webserver: + category: webserver + conditions: + '@stream': http + rewrite: + product: zeek + service: http + zeek-conn: + product: zeek + service: conn + conditions: + '@stream': conn + zeek-conn_long: + product: zeek + service: conn_long + conditions: + '@stream': conn_long + zeek-dce_rpc: + product: zeek + service: dce_rpc + conditions: + '@stream': dce_rpc + zeek-dns: + product: zeek + service: dns + conditions: + '@stream': dns + zeek-dnp3: + product: zeek + service: dnp3 + conditions: + '@stream': dnp3 + zeek-dpd: + product: zeek + service: dpd + conditions: + '@stream': dpd + zeek-files: + product: zeek + service: files + conditions: + '@stream': files + zeek-ftp: + product: zeek + service: ftp + conditions: + '@stream': ftp + zeek-gquic: + product: zeek + service: gquic + conditions: + '@stream': gquic + zeek-http: + product: zeek + service: http + conditions: + '@stream': http + zeek-http2: + product: zeek + service: http2 + conditions: + '@stream': http2 + zeek-intel: + product: zeek + service: intel + conditions: + '@stream': intel + zeek-irc: + product: zeek + service: irc + conditions: + '@stream': irc + zeek-kerberos: + product: zeek + service: kerberos + conditions: + '@stream': kerberos + zeek-known_certs: + product: zeek + service: known_certs + conditions: + '@stream': known_certs + zeek-known_hosts: + product: zeek + service: known_hosts + conditions: + '@stream': known_hosts + zeek-known_modbus: + product: zeek + service: known_modbus + conditions: + '@stream': known_modbus + zeek-known_services: + 
product: zeek + service: known_services + conditions: + '@stream': known_services + zeek-modbus: + product: zeek + service: modbus + conditions: + '@stream': modbus + zeek-modbus_register_change: + product: zeek + service: modbus_register_change + conditions: + '@stream': modbus_register_change + zeek-mqtt_connect: + product: zeek + service: mqtt_connect + conditions: + '@stream': mqtt_connect + zeek-mqtt_publish: + product: zeek + service: mqtt_publish + conditions: + '@stream': mqtt_publish + zeek-mqtt_subscribe: + product: zeek + service: mqtt_subscribe + conditions: + '@stream': mqtt_subscribe + zeek-mysql: + product: zeek + service: mysql + conditions: + '@stream': mysql + zeek-notice: + product: zeek + service: notice + conditions: + '@stream': notice + zeek-ntlm: + product: zeek + service: ntlm + conditions: + '@stream': ntlm + zeek-ntp: + product: zeek + service: ntp + conditions: + '@stream': ntp + zeek-ocsp: + product: zeek + service: ocsp + conditions: + '@stream': ocsp + zeek-pe: + product: zeek + service: pe + conditions: + '@stream': pe + zeek-pop3: + product: zeek + service: pop3 + conditions: + '@stream': pop3 + zeek-radius: + product: zeek + service: radius + conditions: + '@stream': radius + zeek-rdp: + product: zeek + service: rdp + conditions: + '@stream': rdp + zeek-rfb: + product: zeek + service: rfb + conditions: + '@stream': rfb + zeek-sip: + product: zeek + service: sip + conditions: + '@stream': sip + zeek-smb_files: + product: zeek + service: smb_files + conditions: + '@stream': smb_files + zeek-smb_mapping: + product: zeek + service: smb_mapping + conditions: + '@stream': smb_mapping + zeek-smtp: + product: zeek + service: smtp + conditions: + '@stream': smtp + zeek-smtp_links: + product: zeek + service: smtp_links + conditions: + '@stream': smtp_links + zeek-snmp: + product: zeek + service: snmp + conditions: + '@stream': snmp + zeek-socks: + product: zeek + service: socks + conditions: + '@stream': socks + zeek-software: + product: zeek 
+ service: software + conditions: + '@stream': software + zeek-ssh: + product: zeek + service: ssh + conditions: + '@stream': ssh + zeek-ssl: + product: zeek + service: ssl + conditions: + '@stream': ssl + zeek-tls: # In case people call it TLS even though orig log is called ssl + product: zeek + service: tls + conditions: + '@stream': ssl + zeek-syslog: + product: zeek + service: syslog + conditions: + '@stream': syslog + zeek-tunnel: + product: zeek + service: tunnel + conditions: + '@stream': tunnel + zeek-traceroute: + product: zeek + service: traceroute + conditions: + '@stream': traceroute + zeek-weird: + product: zeek + service: weird + conditions: + '@stream': weird + zeek-x509: + product: zeek + service: x509 + conditions: + '@stream': x509 + zeek-ip_search: + product: zeek + service: network + conditions: + '@stream': + - conn + - conn_long + - dce_rpc + - dhcp + - dnp3 + - dns + - ftp + - gquic + - http + - irc + - kerberos + - modbus + - mqtt_connect + - mqtt_publish + - mqtt_subscribe + - mysql + - ntlm + - ntp + - radius + - rfb + - sip + - smb_files + - smb_mapping + - smtp + - smtp_links + - snmp + - socks + - ssh + - tls #SSL + - tunnel + - weird +defaultindex: 'logstash-*' +fieldmappings: + # All Logs Applied Mapping & Taxonomy + clientip: id.orig_h + dst_ip: id.resp_h + dst_port: id.resp_p + network_protocol: proto + src_ip: id.orig_h + src_port: id.orig_p + # DNS matching Taxonomy & DNS Category + answer: answers + #question_length: # Does not exist in open source version + record_type: qtype_name + #parent_domain: # Does not exist in open source version + # HTTP matching Taxonomy & Web/Proxy Category + cs-bytes: request_body_len + cs-cookie: cookie + r-dns: host + sc-bytes: response_body_len + sc-status: status_code + c-uri: uri + c-uri-extension: uri + c-uri-query: uri + c-uri-stem: uri + c-useragent: user_agent + cs-host: host + cs-method: method + cs-referrer: referrer + cs-version: version \ No newline at end of file diff --git 
a/tools/config/powershell-windows-all.yml b/tools/config/powershell-windows-all.yml new file mode 100644 index 000000000..8464ade07 --- /dev/null +++ b/tools/config/powershell-windows-all.yml @@ -0,0 +1,62 @@ +logsources: + windows-application: + product: windows + service: application + conditions: + LogName: 'Application' + windows-security: + product: windows + service: security + conditions: + LogName: 'Security' + windows-system: + product: windows + service: system + conditions: + LogName: 'System' + windows-sysmon: + product: windows + service: sysmon + conditions: + LogName: 'Microsoft-Windows-Sysmon/Operational' + windows-powershell: + product: windows + service: powershell + conditions: + LogName: 'Microsoft-Windows-PowerShell/Operational' + windows-classicpowershell: + product: windows + service: powershell-classic + conditions: + LogName: 'Windows PowerShell' + windows-taskscheduler: + product: windows + service: taskscheduler + conditions: + LogName: 'Microsoft-Windows-TaskScheduler/Operational' + windows-wmi: + product: windows + service: wmi + conditions: + LogName: 'Microsoft-Windows-WMI-Activity/Operational' + windows-dns-server: + product: windows + service: dns-server + category: dns + conditions: + LogName: 'DNS Server' + windows-dns-server-audit: + product: windows + service: dns-server-audit + conditions: + LogName: 'Microsoft-Windows-DNS-Server/Audit' + windows-driver-framework: + product: windows + service: driver-framework + conditions: + LogName: 'Microsoft-Windows-DriverFrameworks-UserMode/Operational' + windows-ntlm: + product: windows + service: ntlm + conditions: + LogName: 'Microsoft-Windows-NTLM/Operational' diff --git a/tools/config/splunk-zeek.yml b/tools/config/splunk-zeek.yml index 1653f329c..b48626715 100644 --- a/tools/config/splunk-zeek.yml +++ b/tools/config/splunk-zeek.yml @@ -3,44 +3,461 @@ order: 20 backends: - splunk - splunkxml + - corelight_splunk logsources: + zeek-category-accounting: + category: accounting + rewrite: 
+ product: zeek + service: syslog + zeek-category-firewall: + category: firewall + conditions: + sourcetype: 'bro:conn:json' + zeek-category-dns: + category: dns + conditions: + sourcetype: 'bro:dns:json' + zeek-category-proxy: + category: proxy + rewrite: + product: zeek + service: http + zeek-category-webserver: + category: webserver + conditions: + sourcetype: 'bro:http:json' + rewrite: + product: zeek + service: http zeek-conn: product: zeek service: conn conditions: sourcetype: 'bro:conn:json' + zeek-conn_long: + product: zeek + service: conn_long + conditions: + sourcetype: 'bro:conn_long:json' + zeek-dce_rpc: + product: zeek + service: dce_rpc + conditions: + sourcetype: 'bro:dce_rpc:json' zeek-dns: product: zeek service: dns conditions: sourcetype: 'bro:dns:json' + zeek-dnp3: + product: zeek + service: dnp3 + conditions: + sourcetype: 'bro:dnp3:json' + zeek-dpd: + product: zeek + service: dpd + conditions: + sourcetype: 'bro:dpd:json' zeek-files: product: zeek service: files conditions: sourcetype: 'bro:files:json' - zeek-kerberos: + zeek-ftp: product: zeek - service: kerberos + service: ftp conditions: - sourcetype: 'bro:kerberos:json' + sourcetype: 'bro:ftp:json' + zeek-gquic: + product: zeek + service: gquic + conditions: + sourcetype: 'bro:gquic:json' zeek-http: product: zeek service: http conditions: sourcetype: 'bro:http:json' + zeek-http2: + product: zeek + service: http2 + conditions: + sourcetype: 'bro:http2:json' + zeek-intel: + product: zeek + service: intel + conditions: + sourcetype: 'bro:intel:json' + zeek-irc: + product: zeek + service: irc + conditions: + sourcetype: 'bro:irc:json' + zeek-kerberos: + product: zeek + service: kerberos + conditions: + sourcetype: 'bro:kerberos:json' + zeek-known_certs: + product: zeek + service: known_certs + conditions: + sourcetype: 'bro:known_certs:json' + zeek-known_hosts: + product: zeek + service: known_hosts + conditions: + sourcetype: 'bro:known_hosts:json' + zeek-known_modbus: + product: zeek + 
service: known_modbus + conditions: + sourcetype: 'bro:known_modbus:json' + zeek-known_services: + product: zeek + service: known_services + conditions: + sourcetype: 'bro:known_services:json' + zeek-modbus: + product: zeek + service: modbus + conditions: + sourcetype: 'bro:modbus:json' + zeek-modbus_register_change: + product: zeek + service: modbus_register_change + conditions: + sourcetype: 'bro:modbus_register_change:json' + zeek-mqtt_connect: + product: zeek + service: mqtt_connect + conditions: + sourcetype: 'bro:mqtt_connect:json' + zeek-mqtt_publish: + product: zeek + service: mqtt_publish + conditions: + sourcetype: 'bro:mqtt_publish:json' + zeek-mqtt_subscribe: + product: zeek + service: mqtt_subscribe + conditions: + sourcetype: 'bro:mqtt_subscribe:json' + zeek-mysql: + product: zeek + service: mysql + conditions: + sourcetype: 'bro:mysql:json' + zeek-notice: + product: zeek + service: notice + conditions: + sourcetype: 'bro:notice:json' + zeek-ntlm: + product: zeek + service: ntlm + conditions: + sourcetype: 'bro:ntlm:json' + zeek-ntp: + product: zeek + service: ntp + conditions: + sourcetype: 'bro:ntp:json' + zeek-ocsp: + product: zeek + service: ocsp + conditions: + sourcetype: 'bro:ocsp:json' + zeek-pe: + product: zeek + service: pe + conditions: + sourcetype: 'bro:pe:json' + zeek-pop3: + product: zeek + service: pop3 + conditions: + sourcetype: 'bro:pop3:json' + zeek-radius: + product: zeek + service: radius + conditions: + sourcetype: 'bro:radius:json' zeek-rdp: product: zeek service: rdp conditions: sourcetype: 'bro:rdp:json' + zeek-rfb: + product: zeek + service: rfb + conditions: + sourcetype: 'bro:rfb:json' + zeek-sip: + product: zeek + service: sip + conditions: + sourcetype: 'bro:sip:json' + zeek-smb_files: + product: zeek + service: smb_files + conditions: + sourcetype: 'bro:smb_files:json' + zeek-smb_mapping: + product: zeek + service: smb_mapping + conditions: + sourcetype: 'bro:smb_mapping:json' + zeek-smtp: + product: zeek + service: smtp 
+ conditions: + sourcetype: 'bro:smtp:json' + zeek-smtp_links: + product: zeek + service: smtp_links + conditions: + sourcetype: 'bro:smtp_links:json' + zeek-snmp: + product: zeek + service: snmp + conditions: + sourcetype: 'bro:snmp:json' + zeek-socks: + product: zeek + service: socks + conditions: + sourcetype: 'bro:socks:json' + zeek-software: + product: zeek + service: software + conditions: + sourcetype: 'bro:software:json' + zeek-ssh: + product: zeek + service: ssh + conditions: + sourcetype: 'bro:ssh:json' zeek-ssl: product: zeek service: ssl conditions: sourcetype: 'bro:ssl:json' + zeek-tls: # In case people call it TLS even though log is called ssl + product: zeek + service: tls + conditions: + sourcetype: 'bro:ssl:json' + zeek-syslog: + product: zeek + service: syslog + conditions: + sourcetype: 'bro:syslog:json' + zeek-tunnel: + product: zeek + service: tunnel + conditions: + sourcetype: 'bro:tunnel:json' + zeek-traceroute: + product: zeek + service: traceroute + conditions: + sourcetype: 'bro:traceroute:json' + zeek-weird: + product: zeek + service: weird + conditions: + sourcetype: 'bro:weird:json' zeek-x509: product: zeek service: x509 conditions: sourcetype: 'bro:x509:json' + zeek-ip_search: + product: zeek + service: network + conditions: + sourcetype: + - 'bro:conn:json' + - 'bro:conn_long:json' + - 'bro:dce_rpc:json' + - 'bro:dhcp:json' + - 'bro:dnp3:json' + - 'bro:dns:json' + - 'bro:ftp:json' + - 'bro:gquic:json' + - 'bro:http:json' + - 'bro:irc:json' + - 'bro:kerberos:json' + - 'bro:modbus:json' + - 'bro:mqtt_connect:json' + - 'bro:mqtt_publish:json' + - 'bro:mqtt_subscribe:json' + - 'bro:mysql:json' + - 'bro:ntlm:json' + - 'bro:ntp:json' + - 'bro:radius:json' + - 'bro:rfb:json' + - 'bro:sip:json' + - 'bro:smb_files:json' + - 'bro:smb_mapping:json' + - 'bro:smtp:json' + - 'bro:smtp_links:json' + - 'bro:snmp:json' + - 'bro:socks:json' + - 'bro:ssh:json' + - 'bro:ssl:json' + - 'bro:tunnel:json' + - 'bro:weird:json' +fieldmappings: + # All Logs 
Applied Mapping & Taxonomy + dst_ip: id.resp_h + dst_port: id.resp_p + network_protocol: proto + src_ip: id.orig_h + src_port: id.orig_p + # DNS matching Taxonomy & DNS Category + answer: answers + #question_length: # Does not exist in open source version + record_type: qtype_name + #parent_domain: # Does not exist in open source version + # HTTP matching Taxonomy & Web/Proxy Category + cs-bytes: request_body_len + cs-cookie: cookie + r-dns: host + sc-bytes: response_body_len + sc-status: status_code + c-uri: uri + c-uri-extension: uri + c-uri-query: uri + c-uri-stem: uri + c-useragent: user_agent + cs-host: host + cs-method: method + cs-referrer: referrer + cs-version: version + # Temporary one off rule name fields + agent.version: version + c-cookie: cookie + c-ip: id.orig_h + cs-uri: uri + clientip: id.orig_h + clientIP: id.orig_h + dest_domain: + - query + - host + - server_name + dest_ip: id.resp_h + dest_port: id.resp_p + #TODO:WhatShouldThisBe?==dest: + #TODO:WhatShouldThisBe?==destination: + #TODO:WhatShouldThisBe?==Destination: + destination.hostname: + - query + - host + - server_name + DestinationAddress: + DestinationHostname: + - host + - query + - server_name + DestinationIp: id.resp_h + DestinationIP: id.resp_h + DestinationPort: id.resp_p + dst-ip: id.resp_h + dstip: id.resp_h + dstport: id.resp_p + Host: + - host + - query + - server_name + HostVersion: http.version + http_host: + - host + - query + - server_name + http_uri: uri + http_url: uri + http_user_agent: user_agent + http.request.url-query-params: uri + HttpMethod: method + in_url: uri + # parent_domain: # Not in open source zeek + post_url_parameter: uri + Request Url: uri + request_url: uri + request_URL: uri + RequestUrl: uri + #response: status_code + resource.url: uri + resource.URL: uri + sc_status: status_code + sender_domain: + - query + - server_name + service.response_code: status_code + source: id.orig_h + SourceAddr: id.orig_h + SourceAddress: id.orig_h + SourceIP: id.orig_h + 
SourceIp: id.orig_h + SourceNetworkAddress: id.orig_h + SourcePort: id.orig_p + srcip: id.orig_h + Status: status_code + status: status_code + url: uri + URL: uri + url_query: uri + url.query: uri + uri_path: uri + user_agent: user_agent + user_agent.name: user_agent + user-agent: user_agent + User-Agent: user_agent + useragent: user_agent + UserAgent: user_agent + User Agent: user_agent + web_dest: + - host + - query + - server_name + web.dest: + - host + - query + - server_name + Web.dest: + - host + - query + - server_name + web.host: + - host + - query + - server_name + Web.host: + - host + - query + - server_name + web_method: method + Web_method: method + web.method: method + Web.method: method + web_src: id.orig_h + web_status: status_code + Web_status: status_code + web.status: status_code + Web.status: status_code + web_uri: uri + web_url: uri + # Most are in ECS, but for things not using Elastic - these need renamed + destination.ip: id.resp_h + destination.port: id.resp_p + http.request.body.content: post_body + source.domain: + - host + - query + - server_name + source.ip: id.orig_h + source.port: id.orig_p \ No newline at end of file diff --git a/tools/config/winlogbeat-modules-enabled.yml b/tools/config/winlogbeat-modules-enabled.yml index 1bb9e2003..9f54bbe44 100644 --- a/tools/config/winlogbeat-modules-enabled.yml +++ b/tools/config/winlogbeat-modules-enabled.yml @@ -56,15 +56,17 @@ fieldmappings: AuthenticationPackageName: winlog.event_data.AuthenticationPackageName CallingProcessName: winlog.event_data.CallingProcessName CallTrace: winlog.event_data.CallTrace + Channel: winlog.channel CommandLine: process.args - ComputerName: winlog.computer_name - ContextInfo: winlog.event_data.ContextInfo + ComputerName: winlog.ComputerName CurrentDirectory: process.working_directory Description: winlog.event_data.Description DestinationHostname: destination.domain DestinationIp: destination.ip + dst_ip: destination.ip #DestinationIsIpv6: 
winlog.event_data.DestinationIsIpv6 #=gets deleted and not boolean...https://github.com/elastic/beats/blob/71eee76e7cfb8d5b18dfacad64864370ddb14ce7/x-pack/winlogbeat/module/sysmon/config/winlogbeat-sysmon.js#L278-L279 DestinationPort: destination.port + dst_port: destination.port DestinationPortName: network.protocol Details: winlog.event_data.Details EngineVersion: winlog.event_data.EngineVersion @@ -72,9 +74,14 @@ fieldmappings: FailureCode: winlog.event_data.FailureCode FileName: file.path GrantedAccess: winlog.event_data.GrantedAccess - GroupName: winlog.event_data.GroupName - GroupSid: winlog.event_data.GroupSid + GroupName: + - winlog.event_data.GroupName + - group.name + GroupSid: + - group.id + - winlog.event_data.GroupSid Hashes: winlog.event_data.Hashes + file_hash: winlog.event_data.Hashes HiveName: winlog.event_data.HiveName HostVersion: winlog.event_data.HostVersion Image: process.executable @@ -109,7 +116,9 @@ fieldmappings: SourceHostname: source.domain SourceImage: process.executable SourceIp: source.ip + src_ip: source.ip SourcePort: source.port + src_port: source.port #SourceIsIpv6: winlog.event_data.SourceIsIpv6 #=gets deleted and not boolean...https://github.com/elastic/beats/blob/71eee76e7cfb8d5b18dfacad64864370ddb14ce7/x-pack/winlogbeat/module/sysmon/config/winlogbeat-sysmon.js#L278-L279 StartModule: winlog.event_data.StartModule Status: winlog.event_data.Status @@ -126,3 +135,16 @@ fieldmappings: TargetUserSid: user.id User: user.name WorkstationName: source.domain + # Channel: WLAN-Autoconfig AND EventID: 8001 + AuthenticationAlgorithm: winlog.event_data.AuthenticationAlgorithm + BSSID: winlog.event_data.BSSID + BSSType: winlog.event_data.BSSType + CipherAlgorithm: winlog.event_data.CipherAlgorithm + ConnectionId: winlog.event_data.ConnectionId + ConnectionMode: winlog.event_data.ConnectionMode + InterfaceDescription: winlog.event_data.InterfaceDescription + InterfaceGuid: winlog.event_data.InterfaceGuid + OnexEnabled: 
winlog.event_data.OnexEnabled + PHYType: winlog.event_data.PHYType + ProfileName: winlog.event_data.ProfileName + SSID: winlog.event_data.SSID \ No newline at end of file diff --git a/tools/sigma/backends/ala.py b/tools/sigma/backends/ala.py index ea5fd950c..e3f1fd9fc 100644 --- a/tools/sigma/backends/ala.py +++ b/tools/sigma/backends/ala.py @@ -13,22 +13,49 @@ # You should have received a copy of the GNU Lesser General Public License # along with this program. If not, see . - -import re, json +import os +import sys +import re +import json import xml.etree.ElementTree as xml -from ..config.mapping import ( +from sigma.config.mapping import ( SimpleFieldMapping, MultiFieldMapping, ConditionalFieldMapping ) -from ..parser.condition import SigmaAggregationParser -from ..parser.exceptions import SigmaParseError -from ..parser.modifiers.type import SigmaRegularExpressionModifier -from .base import SingleTextQueryBackend +from sigma.parser.condition import SigmaAggregationParser + +from sigma.parser.modifiers.type import SigmaRegularExpressionModifier +from sigma.backends.base import SingleTextQueryBackend + +from sigma.parser.modifiers.base import SigmaTypeModifier +from sigma.parser.modifiers.transform import SigmaContainsModifier, SigmaStartswithModifier, SigmaEndswithModifier from .data import sysmon_schema from .exceptions import NotSupportedError -class AzureLogAnalyticsBackend(SingleTextQueryBackend): +class DeepFieldMappingMixin(object): + + def fieldNameMapping(self, fieldname, value): + if isinstance(fieldname, str): + get_config = self.sigmaconfig.fieldmappings.get(fieldname) + if not get_config and '|' in fieldname: + fieldname = fieldname.split('|', 1)[0] + get_config = self.sigmaconfig.fieldmappings.get(fieldname) + if isinstance(get_config, ConditionalFieldMapping): + condition = self.sigmaconfig.fieldmappings.get(fieldname).conditions + for key, item in self.logsource.items(): + if condition.get(key) and condition.get(key, {}).get(item): + new_fieldname = 
condition.get(key, {}).get(item) + if any(new_fieldname): + return super().fieldNameMapping(new_fieldname[0], value) + return super().fieldNameMapping(fieldname, value) + + + def generate(self, sigmaparser): + self.logsource = sigmaparser.parsedyaml.get("logsource", {}) + return super().generate(sigmaparser) + +class AzureLogAnalyticsBackend(DeepFieldMappingMixin, SingleTextQueryBackend): """Converts Sigma rule into Azure Log Analytics Queries.""" identifier = "ala" active = True @@ -43,8 +70,7 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend): ) config_required = False - reEscape = re.compile('("|(?', val) val = re.sub('\\*', '.*', val) + if "\\" in val: + return "%s \"(?i)%s\"" % (op, val) + return "%s \"(?i)%s\"" % (op, val) + elif val.startswith("*") or val.endswith("*"): + op = "contains" + val = re.sub('([".^$]|(?![*?]))', '\g<1>', val) + val = re.sub('\\*', '', val) val = re.sub('\\?', '.', val) - if "\\" in val: - return "%s @\"%s\"" % (op, val) - else: # value possibly only starts and/or ends with *, use prefix/postfix match - if val.endswith("*") and val.startswith("*"): - op = "contains" - val = self.cleanValue(val[1:-1]) - elif val.endswith("*"): - op = "startswith" - val = self.cleanValue(val[:-1]) - elif val.startswith("*"): - op = "endswith" - val = self.cleanValue(val[1:]) - - if "\\" in val: - return "%s @\"%s\"" % (op, val) - + # if "\\" in val: + # return "%s @\"%s\"" % (op, val) + return "%s \"%s\"" % (op, val) + # elif "\\" in val: + # return "%s @\"%s\"" % (op, val) return "%s \"%s\"" % (op, val) def generate(self, sigmaparser): @@ -140,13 +164,11 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend): self.service = None detection = sigmaparser.parsedyaml.get("detection", {}) - is_parent_cmd = False if "keywords" in detection.keys(): return super().generate(sigmaparser) - if self.category == "process_creation": - self.table = "SysmonEvent" + self.table = "SecurityEvent" self.eventid = "1" elif self.service == "security": self.table 
= "SecurityEvent" @@ -154,6 +176,12 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend): self.table = "SysmonEvent" elif self.service == "powershell": self.table = "Event" + elif self.service == "office365": + self.table = "OfficeActivity" + elif self.service == "azuread": + self.table = "AuditLogs" + elif self.service == "azureactivity": + self.table = "AzureActivity" else: if self.service: if "-" in self.service: @@ -181,8 +209,8 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend): elif self.sysmon: parse_string = self.map_sysmon_schema(self.eventid) before = "%s | parse EventData with * %s | where " % (self.table, parse_string) - elif self.category == "process_creation" and not self._has_logsource_event_cond: - before = "%s | where EventID == \"%s\" | where " % (self.table, self.eventid) + # elif self.category == "process_creation" and not self._has_logsource_event_cond: + # before = "%s | where EventID == \"%s\" | where " % (self.table, self.eventid) else: before = "%s | where " % self.table return before @@ -193,6 +221,7 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend): and creates an appropriate table reference. 
""" key, value = node + key = self.fieldNameMapping(key, value) if type(value) == list: # handle map items with values list like multiple OR-chained conditions return "(" + self.generateORNode( [(key, v) for v in value] @@ -207,17 +236,26 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend): self.table = "SecurityEvent" elif self.service == "system": self.table = "Event" - elif type(value) in (str, int): # default value processing - mapping = (key, self.default_value_mapping) + return self.mapExpression % (key, value) + elif type(value) in [SigmaTypeModifier, SigmaContainsModifier, SigmaRegularExpressionModifier, SigmaStartswithModifier, SigmaEndswithModifier]: + return self.generateMapItemTypedNode(key, value) + elif type(value) in (str, int): # default value processing' + #default_filters = ["endswith", "contains", "startswith", "re"] + # if any([item for item in default_filters if item in key]): + # key = re.sub(key, default_filters, "") + # return self.regexExpression % (key, self.cleanValue(value)) + # else: + # value_mapping = self.default_value_mapping + value_mapping = self.default_value_mapping + mapping = (key, value_mapping) if len(mapping) == 1: mapping = mapping[0] if type(mapping) == str: return mapping elif callable(mapping): - conds = mapping(key, value) return self.generateSubexpressionNode( self.generateANDNode( - [cond for cond in mapping(key, value)] + [cond for cond in mapping(key, self.cleanValue(value))] ) ) elif len(mapping) == 2: @@ -226,12 +264,29 @@ class AzureLogAnalyticsBackend(SingleTextQueryBackend): if type(mapitem) == str: result.append(mapitem) elif callable(mapitem): - result.append(mapitem(val)) + result.append(mapitem(self.cleanValue(val))) return "{} {}".format(*result) else: raise TypeError("Backend does not support map values of type " + str(type(value))) + elif type(value) == list: + return self.generateMapItemListNode(key, value) - return super().generateMapItemNode(node) + elif value is None: + return 
self.nullExpression % (key, ) + else: + raise TypeError("Backend does not support map values of type " + str(type(value))) + + def generateMapItemTypedNode(self, fieldname, value): + return "%s %s" % (fieldname, self.generateTypedValueNode(value)) + + def generateTypedValueNode(self, node): + try: + val = str(node) + if "*" in val: + val = re.sub('\\*', '.*', val) + return self.typedValueExpression[type(node)] % (val) + except KeyError: + raise NotImplementedError("Type modifier '{}' is not supported by backend".format(node.identifier)) def generateAggregation(self, agg): if agg is None: @@ -325,35 +380,89 @@ class AzureAPIBackend(AzureLogAnalyticsBackend): def __init__(self, *args, **kwargs): """Initialize field mappings""" super().__init__(*args, **kwargs) + self.techniques = self._load_mitre_file("techniques") - def create_rule(self, config): - tags = config.get("tags", []) + def find_technique(self, key_ids): + for key_id in set(key_ids): + if not key_id: + continue + for technique in self.techniques: + if key_id == technique.get("technique_id", ""): + yield technique + + def _load_mitre_file(self, mitre_type): + try: + backend_dir = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", "config", "mitre")) + path = os.path.join(backend_dir, "{}.json".format(mitre_type)) + with open(path) as config_file: + config = json.load(config_file) + return config + except (IOError, OSError) as e: + print("Failed to open {} configuration file '%s': %s".format(path, str(e)), file=sys.stderr) + return [] + except json.JSONDecodeError as e: + print("Failed to parse {} configuration file '%s' as valid YAML: %s" % (path, str(e)), file=sys.stderr) + return [] + + def skip_tactics_or_techniques(self, src_technics, src_tactics): + tactics = set() + technics = set() + + local_storage_techniques = {item["technique_id"]: item for item in self.find_technique(src_technics)} + + for key_id in src_technics: + src_tactic = 
local_storage_techniques.get(key_id, {}).get("tactic") + if not src_tactic: + continue + src_tactic = set(src_tactic) + + for item in src_tactics: + if item in src_tactic: + technics.add(key_id) + tactics.add(item) + + return sorted(tactics), sorted(technics) + + def parse_severity(self, old_severity): + if old_severity.lower() == "critical": + return "high" + return old_severity + + def get_tactics_and_techniques(self, tags): tactics = list() technics = list() + for tag in tags: tag = tag.replace("attack.", "") - if re.match("[tT][0-9]{4}", tag): + if re.match("[t][0-9]{4}", tag, re.IGNORECASE): technics.append(tag.title()) else: if "_" in tag: - tag_list = tag.split("_") - tag_list = [item.title() for item in tag_list] - tactics.append("".join(tag_list)) - else: - tactics.append(tag.title()) + tag = tag.replace("_", " ") + tag = tag.title() + tactics.append(tag) + + return tactics, technics + + def create_rule(self, config): + tags = config.get("tags", []) + + tactics, technics = self.get_tactics_and_techniques(tags) + tactics, technics = self.skip_tactics_or_techniques(technics, tactics) + tactics = list(map(lambda s: s.replace(" ", ""), tactics)) rule = { "displayName": "{} by {}".format(config.get("title"), config.get('author')), "description": "{} {}".format(config.get("description"), "Technique: {}.".format(",".join(technics))), - "severity": config.get("level", "medium"), + "severity": self.parse_severity(config.get("level", "medium")), "enabled": True, "query": config.get("translation"), "queryFrequency": "12H", "queryPeriod": "12H", "triggerOperator": "GreaterThan", - "triggerThreshold": 1, + "triggerThreshold": 0, "suppressionDuration": "12H", - "suppressionEnabled": False, + "suppressionEnabled": True, "tactics": tactics } return json.dumps(rule) @@ -365,3 +474,5 @@ class AzureAPIBackend(AzureLogAnalyticsBackend): configs.update({"translation": translation}) rule = self.create_rule(configs) return rule + else: + raise NotSupportedError("No table could 
be determined from Sigma rule") diff --git a/tools/sigma/backends/arcsight.py b/tools/sigma/backends/arcsight.py index 10a062732..6cd10709e 100644 --- a/tools/sigma/backends/arcsight.py +++ b/tools/sigma/backends/arcsight.py @@ -151,9 +151,8 @@ class ArcSightBackend(SingleTextQueryBackend): return "(" + self.orToken.join([self.generateNode(val) for val in new_value]) + ")" return "(" + self.orToken.join([self.generateNode(val) for val in node]) + ")" - -class ArcSightBackend(SingleTextQueryBackend): - """Converts Sigma rule into ArcSight saved search. Contributed by SOC Prime. https://socprime.com""" +class ArcSightESMBackend(SingleTextQueryBackend): + """Converts Sigma rule into ArcSight ESM saved search. Contributed by SOC Prime. https://socprime.com""" reEscape = re.compile('(["\\\()])') identifier = "arcsight-esm" active = True @@ -188,13 +187,11 @@ class ArcSightBackend(SingleTextQueryBackend): def generateCleanValueNodeLogsource(self, value): return self.valueExpression % (self.cleanValue(str(value))) - def CleanNode(self, node): if isinstance(node, str) and "*" in node and not node.startswith("*") and not node.endswith("*"): node = ["*{}*".format(x) for x in node.split('*') if x] return node - #Clearing values from special characters. 
def generateMapItemNode(self, node): key, value = node @@ -225,7 +222,7 @@ class ArcSightBackend(SingleTextQueryBackend): elif isinstance(value, str) and value.endswith("*"): return self.startsWithExpression % (key, self.generateValueNode(self.CleanNode(value))) else: - return self.generateValueNode(value) + return self.mapExpression % (key, self.generateValueNode(value)) elif isinstance(value, list): new_value = list() for item in value: @@ -245,8 +242,6 @@ class ArcSightBackend(SingleTextQueryBackend): else: raise TypeError("Backend does not support map values of type " + str(type(value))) - - # for keywords values with space def generateValueNode(self, node): if type(node) is int: diff --git a/tools/sigma/backends/base.py b/tools/sigma/backends/base.py index 4675b0197..40a5ef367 100644 --- a/tools/sigma/backends/base.py +++ b/tools/sigma/backends/base.py @@ -19,6 +19,7 @@ import sys import sigma import yaml +from sigma.backends.exceptions import NotSupportedError from .mixins import RulenameCommentMixin, QuoteCharMixin from sigma.parser.modifiers.base import SigmaTypeModifier @@ -306,3 +307,34 @@ class SingleTextQueryBackend(RulenameCommentMixin, BaseBackend, QuoteCharMixin): transformed from the original name given in the Sigma rule. 
""" return fieldname + +class CorelightQueryBackend: + + def generate(self, sigmaparser): + lgs = sigmaparser.parsedyaml.get("logsource") + allow_types = { + 'category': + [ + 'proxy', 'firewall', 'webserver', 'accounting', 'dns' + ], + 'product': + [ + 'zeek', 'apache', 'netflow', 'firewall' + ], + 'service': [ + 'radius', 'kerberos', 'pe', 'ntlm', 'sip', 'syslog', 'ntp', + 'mqtt_subscribe', 'smb_files', 'irc', 'http2', 'rfb', + 'tunnel', 'socks', 'mqtt_publish', 'network', 'weird', + 'known_certs', 'traceroute', 'modbus', 'smtp_links', + 'ssl', 'known_hosts', 'software', 'smtp', 'tls', 'intel', + 'ssh', 'dce_rpc', 'x509', 'known_services', 'http', 'files', + 'gquic', 'ftp', 'dns', 'conn', 'dnp3', 'rdp', 'dpd', + 'known_modbus', 'conn_long', 'modbus_register_change', + 'mqtt_connect', 'pop3', 'mysql', 'notice', 'snmp', 'smb_mapping' + ] + } + for logsource_type, value in lgs.items(): + if allow_types.get(logsource_type) and value.lower() in allow_types.get(logsource_type): + return super().generate(sigmaparser) + lgs_text = ", ".join(["%s: %s" % (key, lgs.get(key)) for key in lgs.keys()]) + raise NotSupportedError("Corelight backend not supported logsources: %s." 
% lgs_text) \ No newline at end of file diff --git a/tools/sigma/backends/carbonblack.py b/tools/sigma/backends/carbonblack.py index 1dfd68e78..a06af8263 100644 --- a/tools/sigma/backends/carbonblack.py +++ b/tools/sigma/backends/carbonblack.py @@ -1,5 +1,8 @@ import re - +import requests +import json +import os +from sigma.config.eventdict import event from fnmatch import fnmatch from sigma.backends.base import SingleTextQueryBackend @@ -45,7 +48,7 @@ class CarbonBlackQueryBackend(CarbonBlackWildcardHandlingMixin, SingleTextQueryB active = True #reEscape = re.compile("([\s+\\-=!(){}\\[\\]^\"~:/]|(?]") andToken = " AND " orToken = " OR " @@ -70,20 +73,46 @@ class CarbonBlackQueryBackend(CarbonBlackWildcardHandlingMixin, SingleTextQueryB def cleanValue(self, val): + if("[1 to *]" in val): + self.reEscape = re.compile("([()])") + else: + self.reEscape = re.compile("([\s\s+()])") + val = val.strip() val = super().cleanValue(val) if isinstance(val, str): + if val.startswith("*"): + val = val.replace("*", "",1) + if val.startswith("\\"): + val = val.replace("\\", "", 1) if val.startswith("*\\"): val = val.replace("*\\", "*") if val.startswith("*/"): val = val.replace("*/", "*") + if val.startswith("*"): + val = val.replace("*", "") if val.endswith("\\*"): val = val.replace("\\*", "*") if val.endswith("/*"): val = val.replace("/*", "*") + val = val.strip() return val + def cleanIPRange(self,value): + new_value = value + if type(new_value) is str and value.find('*') : + sub = value.count('.') + if(value[-2:] == '.*'): + value = value[:-2] + min_ip = value + '.0' * (4 - sub) + new_value = min_ip + '/' + str(8 * (4 - sub)) + elif type(new_value) is list: + for index, vl in enumerate(new_value): + new_value[index] = self.cleanIPRange(vl) + + return new_value + def generateValueNode(self, node): - result = super().generateValueNode(node) + result = self.valueExpression % (str(node)) if result == "" or result.isspace(): return '""' else: @@ -94,16 +123,21 @@ class 
CarbonBlackQueryBackend(CarbonBlackWildcardHandlingMixin, SingleTextQueryB def generateMapItemNode(self, node): fieldname, value = node + if(fieldname == "EventID" and (type(value) is str or type(value) is int )): + fieldname = self.generateEventKey(value) + value = self.generateEventValue(value) if fieldname.lower() in self.excluded_fields: return else: transformed_fieldname = self.fieldNameMapping(fieldname, value) + if(transformed_fieldname == "ipaddr"): + value = self.cleanIPRange(value) if self.mapListsSpecialHandling == False and type(value) in (str, int, list) or self.mapListsSpecialHandling == True and type(value) in (str, int): #return self.mapExpression % (transformed_fieldname, self.generateNode(value)) if isinstance(value, list): return self.generateNode([self.mapExpression % (transformed_fieldname, self.cleanValue(item)) for item in value]) elif isinstance(value, str) or isinstance(value, int): - return self.mapExpression % (transformed_fieldname, self.generateNode(value)) + return self.mapExpression % (transformed_fieldname, self.generateNode(self.cleanValue(value))) elif type(value) == list: return self.generateMapItemListNode(transformed_fieldname, value) elif isinstance(value, SigmaTypeModifier): @@ -118,22 +152,59 @@ class CarbonBlackQueryBackend(CarbonBlackWildcardHandlingMixin, SingleTextQueryB if expression: return "(%s%s)" % (self.notToken, expression) +# Function to upload watchlists through CB API + def postAPI(self,result,title,desc): + url = os.getenv("cbapi_watchlist") + body = { + "name":title, + "search_query":"q="+str(result), + "description":desc, + "index_type":"events" + } + header = { + "X-Auth-Token": os.getenv("APIToken") + } + print(title) + x = requests.post(url, data =json.dumps(body), headers = header, verify=False) + print(x.text) + + def generateEventKey(self, value): + if (value in event): + return event[value][0] + else: + return 'eventid' + + def generateEventValue(self, value): + if (value in event): + return 
event[value][1] + else: + return '' def generate(self, sigmaparser): """Method is called for each sigma rule and receives the parsed rule (SigmaParser)""" + title = sigmaparser.parsedyaml["title"] + desc = sigmaparser.parsedyaml["description"] try: self.category = sigmaparser.parsedyaml['logsource'].setdefault('category', None) self.counted = sigmaparser.parsedyaml.get('counted', None) self.excluded_fields = [item.lower() for item in sigmaparser.config.config.get("excludedfields", [])] except KeyError: self.category = None - if self.category == "process_creation": - for parsed in sigmaparser.condparsed: - query = self.generateQuery(parsed) - result = "" + for parsed in sigmaparser.condparsed: + query = self.generateQuery(parsed) + result = "" - if query is not None: - result += query - return result - else: - raise NotSupportedError("Not supported logsource category.") \ No newline at end of file + if query is not None: + result += query + # self.postAPI(result,title,desc) + return result + # if self.category == "process_creation": + # for parsed in sigmaparser.condparsed: + # query = self.generateQuery(parsed) + # result = "" + + # if query is not None: + # result += query + # return result + # else: + # raise NotSupportedError("Not supported logsource category.") diff --git a/tools/sigma/backends/discovery.py b/tools/sigma/backends/discovery.py index fdb2347f2..399ce79e6 100644 --- a/tools/sigma/backends/discovery.py +++ b/tools/sigma/backends/discovery.py @@ -25,7 +25,7 @@ from sigma.tools import getAllSubclasses, getClassDict def getBackendList(): """Return list of backend classes""" path = os.path.dirname(__file__) - return frozenset(getAllSubclasses(path, "backends", BaseBackend)) + return getAllSubclasses(path, "backends", BaseBackend) def getBackendDict(): return getClassDict(getBackendList()) diff --git a/tools/sigma/backends/elasticsearch.py b/tools/sigma/backends/elasticsearch.py index f1b55ef36..f94c9cf15 100644 --- 
a/tools/sigma/backends/elasticsearch.py +++ b/tools/sigma/backends/elasticsearch.py @@ -18,33 +18,92 @@ import json import re from fnmatch import fnmatch import sys +import os +from random import randrange import sigma import yaml from sigma.parser.modifiers.type import SigmaRegularExpressionModifier from sigma.parser.condition import ConditionOR, ConditionAND, NodeSubexpression -from .base import BaseBackend, SingleTextQueryBackend + +from sigma.config.mapping import ConditionalFieldMapping +from .base import BaseBackend, SingleTextQueryBackend, CorelightQueryBackend from .mixins import RulenameCommentMixin, MultiRuleOutputMixin from .exceptions import NotSupportedError + +class DeepFieldMappingMixin(object): + + def fieldNameMapping(self, fieldname, value): + if isinstance(fieldname, str): + get_config = self.sigmaconfig.fieldmappings.get(fieldname) + if not get_config and '|' in fieldname: + fieldname = fieldname.split('|', 1)[0] + get_config = self.sigmaconfig.fieldmappings.get(fieldname) + if isinstance(get_config, ConditionalFieldMapping): + condition = self.sigmaconfig.fieldmappings.get(fieldname).conditions + for key, item in self.logsource.items(): + if condition.get(key) and condition.get(key, {}).get(item): + new_fieldname = condition.get(key, {}).get(item) + if any(new_fieldname): + return super().fieldNameMapping(new_fieldname[0], value) + return super().fieldNameMapping(fieldname, value) + + + def generate(self, sigmaparser): + self.logsource = sigmaparser.parsedyaml.get("logsource", {}) + return super().generate(sigmaparser) + + + class ElasticsearchWildcardHandlingMixin(object): """ Determine field mapping to keyword subfields depending on existence of wildcards in search values. Further, provide configurability with backend parameters. """ options = SingleTextQueryBackend.options + ( - ("keyword_field", "keyword", "Keyword sub-field name", None), - ("keyword_blacklist", None, "Fields that don't have a keyword subfield (wildcards * and ? 
allowed)", None) + ("keyword_field", "keyword", "Keyword sub-field name (default is: '.keyword'). Set blank value if all keyword fields are the base(top-level) field. Additionally see 'keyword_base_fields' for more granular control of the base & subfield situation.", None), + ("analyzed_sub_field_name", "", "Analyzed sub-field name. By default analyzed field is the base field. Therefore, use this option to make the analyzed field a subfield. An example value would be '.text' ", None), + ("analyzed_sub_fields", None, "Fields that have an analyzed sub-field.", None), + ("keyword_base_fields", None, "Fields that the keyword is base (top-level) field. By default analyzed field is the base field. So use this option to change that logic. Valid options are: list of fields, single field. Also, wildcards * and ? allowed.", None), + ("keyword_whitelist", None, "Fields to always set as keyword. Bypasses case insensitive options. Valid options are: list of fields, single field. Also, wildcards * and ? allowed.", None), + ("keyword_blacklist", None, "Fields to never set as keyword (ie: always set as analyzed field). Bypasses case insensitive options. Valid options are: list of fields, single field. Also, wildcards * and ? allowed.", None), + ("case_insensitive_whitelist", None, "Fields to make the values case insensitive regex. Automatically sets the field as a keyword. Valid options are: list of fields, single field. Also, wildcards * and ? allowed.", None), + ("case_insensitive_blacklist", None, "Fields to exclude from being made into case insensitive regex. Valid options are: list of fields, single field. Also, wildcards * and ? 
allowed.", None) ) reContainsWildcard = re.compile("(?:(?\\*", value ) + # Make upper/lower + value = re.sub( r"[A-Za-z]", lambda x: "[" + x.group( 0 ).upper() + x.group( 0 ).lower() + "]", value ) + # Turn `*` into wildcard, only if odd number of '\'(because this would mean already escaped) + value = re.sub( r"(((?.*", value ) + # Escape additional values that are treated as specific "operators" within Elastic. (ie: @, ?, &, <, >, and ~) + # reference: https://www.elastic.co/guide/en/elasticsearch/reference/current/regexp-syntax.html#regexp-optional-operators + value = re.sub( r"(((?])", "\g<1>\\\\\g<4>", value ) + # Validate regex + try: + re.compile(value) + return {'is_regex': True, 'value': value} + # Regex failed + except re.error: + raise TypeError( "Regular expression validation error for: '%s')" %str(value) ) + else: + return { 'is_regex': False, 'value': value } + + +class ElasticsearchQuerystringBackend(DeepFieldMappingMixin, ElasticsearchWildcardHandlingMixin, SingleTextQueryBackend): """Converts Sigma rule into Elasticsearch query string. 
Only searches, no aggregations.""" identifier = "es-qs" active = True @@ -101,6 +259,11 @@ class ElasticsearchQuerystringBackend(ElasticsearchWildcardHandlingMixin, Single return '""' else: if self.matchKeyword: # don't quote search value on keyword field + if self.CaseInSensitiveField: + make_ci = self.makeCaseInSensitiveValue(result) + result = make_ci.get('value') + if make_ci.get('is_regex'): # Determine if still should be a regex + result = "/%s/" % result # Regex place holders for regex return result else: return "\"%s\"" % result @@ -133,7 +296,12 @@ class ElasticsearchQuerystringBackend(ElasticsearchWildcardHandlingMixin, Single else: return super().generateSubexpressionNode(node) -class ElasticsearchDSLBackend(RulenameCommentMixin, ElasticsearchWildcardHandlingMixin, BaseBackend): + +class ElasticsearchCorelightBackend(CorelightQueryBackend, ElasticsearchQuerystringBackend): + identifier = "corelight_es-qs" + + +class ElasticsearchDSLBackend(DeepFieldMappingMixin, RulenameCommentMixin, ElasticsearchWildcardHandlingMixin, BaseBackend): """ElasticSearch DSL backend""" identifier = 'es-dsl' active = True @@ -280,12 +448,12 @@ class ElasticsearchDSLBackend(RulenameCommentMixin, ElasticsearchWildcardHandlin self.queries[-1]['aggs'] = { count_agg_group_name: { "terms": { - "field": "{}.keyword".format(agg.groupfield) + "field": "{}".format(agg.groupfield) }, "aggs": { count_distinct_agg_name: { "cardinality": { - "field": "{}.keyword".format(agg.aggfield) + "field": "{}".format(agg.aggfield) } }, "limit": { @@ -304,7 +472,7 @@ class ElasticsearchDSLBackend(RulenameCommentMixin, ElasticsearchWildcardHandlin self.queries[-1]['aggs'] = { group_aggname: { 'terms': { - 'field': '%s' % (agg.groupfield + ".keyword") + 'field': '%s' % (agg.groupfield) }, 'aggs': { 'limit': { @@ -452,7 +620,8 @@ class KibanaBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin): if self.output_type == "import": # output format that can be imported via Kibana UI for item in 
self.kibanaconf: # JSONize kibanaSavedObjectMeta.searchSourceJSON item['_source']['kibanaSavedObjectMeta']['searchSourceJSON'] = json.dumps(item['_source']['kibanaSavedObjectMeta']['searchSourceJSON']) - return json.dumps(self.kibanaconf, indent=2) + if self.kibanaconf: + return json.dumps(self.kibanaconf, indent=2) elif self.output_type == "curl": for item in self.indexsearch: return item @@ -475,6 +644,11 @@ class KibanaBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin): def index_variable_name(self, index): return "index_" + index.replace("-", "__").replace("*", "X") + +class KibanaCorelightBackend(CorelightQueryBackend, KibanaBackend): + identifier = "corelight_kibana" + + class XPackWatcherBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin): """Converts Sigma Rule into X-Pack Watcher JSON for alerting""" identifier = "xpack-watcher" @@ -563,7 +737,7 @@ class XPackWatcherBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin) "aggs": { "agg": { "terms": { - "field": condition.parsedAgg.aggfield + ".keyword", + "field": condition.parsedAgg.aggfield, "size": 10, "order": { "_count": order @@ -581,7 +755,7 @@ class XPackWatcherBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin) "aggs": { "by": { "terms": { - "field": condition.parsedAgg.groupfield + ".keyword", + "field": condition.parsedAgg.groupfield, "size": 10, "order": { "_count": order @@ -781,7 +955,11 @@ class XPackWatcherBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin) raise NotImplementedError("Output type '%s' not supported" % self.output_type) return result -class ElastalertBackend(MultiRuleOutputMixin): +class XPackWatcherCorelightBackend(CorelightQueryBackend, XPackWatcherBackend): + identifier = "corelight_xpack-watcher" + + +class ElastalertBackend(DeepFieldMappingMixin, MultiRuleOutputMixin): """Elastalert backend""" active = True supported_alert_methods = {'email', 'http_post'} @@ -993,6 +1171,7 @@ class ElastalertBackendQs(ElastalertBackend, 
ElasticsearchQuerystringBackend): return [{ 'query' : { 'query_string' : { 'query' : super().generateQuery(parsed) } } }] + class ElasticSearchRuleBackend(ElasticsearchQuerystringBackend): identifier = "elasticsearch-rule" active = True @@ -1013,16 +1192,19 @@ class ElasticSearchRuleBackend(ElasticsearchQuerystringBackend): except (IOError, OSError) as e: print("Failed to open {} configuration file '%s': %s".format(path, str(e)), file=sys.stderr) return [] - except json.JSONDecoder as e: + except json.JSONDecodeError as e: print("Failed to parse {} configuration file '%s' as valid YAML: %s" % (path, str(e)), file=sys.stderr) return [] def generate(self, sigmaparser): translation = super().generate(sigmaparser) if translation: + index = sigmaparser.get_logsource().index + if len(index) == 0: + index = ["apm-*-transaction", "auditbeat-*", "endgame-*", "filebeat-*", "packetbeat-*", "winlogbeat-*"] configs = sigmaparser.parsedyaml configs.update({"translation": translation}) - rule = self.create_rule(configs) + rule = self.create_rule(configs, index) return rule @@ -1071,16 +1253,18 @@ class ElasticSearchRuleBackend(ElasticsearchQuerystringBackend): elif level == "critical": return randrange(74,101) - def create_rule(self, configs): + def create_rule(self, configs, index): tags = configs.get("tags", []) tactics_list = list() technics_list = list() + new_tags = list() for tag in tags: tag = tag.replace("attack.", "") if re.match("[t][0-9]{4}", tag, re.IGNORECASE): tech = self.find_technique(tag.title()) if tech: + new_tags.append(tag.title()) technics_list.append(tech) else: if "_" in tag: @@ -1088,33 +1272,33 @@ class ElasticSearchRuleBackend(ElasticsearchQuerystringBackend): tag_list = [item.title() for item in tag_list] tact = self.find_tactics(key_name=" ".join(tag_list)) if tact: + new_tags.append(" ".join(tag_list)) tactics_list.append(tact) elif re.match("[ta][0-9]{4}", tag, re.IGNORECASE): tact = self.find_tactics(key_id=tag.upper()) if tact: + 
new_tags.append(tag.upper()) tactics_list.append(tact) else: tact = self.find_tactics(key_name=tag.title()) if tact: + new_tags.append(tag.title()) tactics_list.append(tact) threat = self.create_threat_description(tactics_list=tactics_list, techniques_list=technics_list) - rule_id = configs.get("title", "").lower().replace(" ", "_") + rule_name = configs.get("title", "").lower() + rule_id = re.sub(re.compile('[()*+!,\[\].\s"]'), "_", rule_name) risk_score = self.map_risk_score(configs.get("level", "medium")) + references = configs.get("reference") + if references is None: + references = configs.get("references") rule = { "description": configs.get("description", ""), "enabled": True, - "false_positives": configs.get('falsepositives'), + "false_positives": configs.get('falsepositives', "Unkown"), "filters": [], "from": "now-360s", "immutable": False, - "index": [ - "apm-*-transaction*", - "auditbeat-*", - "endgame-*", - "filebeat-*", - "packetbeat-*", - "winlogbeat-*" - ], + "index": index, "interval": "5m", "rule_id": rule_id, "language": "lucene", @@ -1123,15 +1307,19 @@ class ElasticSearchRuleBackend(ElasticsearchQuerystringBackend): "risk_score": risk_score, "name": configs.get("title", ""), "query":configs.get("translation"), - "references": configs.get("references"), "meta": { "from": "1m" }, "severity": configs.get("level", "medium"), - "tags": tags, + "tags": new_tags, "to": "now", "type": "query", "threat": threat, "version": 1 } - return json.dumps(rule) \ No newline at end of file + if references: + rule.update({"references": references}) + return json.dumps(rule) + +class ElasticSearchRuleCorelightBackend(CorelightQueryBackend, ElasticSearchRuleBackend): + identifier = "corelight_elasticsearch-rule" diff --git a/tools/sigma/backends/humio.py b/tools/sigma/backends/humio.py new file mode 100644 index 000000000..21577e151 --- /dev/null +++ b/tools/sigma/backends/humio.py @@ -0,0 +1,160 @@ +# Output backends for sigmac +# Copyright 2016-2018 Thomas Patzke, 
Florian Roth, Roey + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. + +# You should have received a copy of the GNU Lesser General Public License +# along with this program. If not, see . + +import re + +from sigma.parser.modifiers.type import SigmaRegularExpressionModifier + +from sigma.parser.condition import SigmaAggregationParser +from .base import SingleTextQueryBackend +from .mixins import MultiRuleOutputMixin + +class HumioBackend(SingleTextQueryBackend): + """Converts Sigma rule into Humio query.""" + identifier = "humio" + active = True + + reEscape = re.compile('("|(?. - -import re -import yaml -from collections import namedtuple -from .base import BaseBackend -from sigma.parser.modifiers.base import SigmaTypeModifier -from sigma.parser.modifiers.type import SigmaRegularExpressionModifier - -# A few helper functions for cases where field mapping cannot be done -# as easily one by one, or can be done more efficiently. -def _windowsEventLogFieldName(fieldName): - if 'EventID' == fieldName: - return 'Event/System/EventID' - return 'Event/EventData/%s' % (fieldName,) - -def _mapProcessCreationOperations(node): - # Here we fix some common pitfalls found in rules - # in a consistent fashion (already processed to D&R rule). - - # First fixup is looking for a specific path prefix - # based on a specific drive letter. There are many cases - # where the driver letter can change or where the early - # boot process refers to it as "\Device\HarddiskVolume1\". 
- if ("starts with" == node["op"] and - "event/FILE_PATH" == node["path"] and - node["value"].lower().startswith("c:\\")): - node["op"] = "matches" - node["re"] = "^(?:(?:.:)|(?:\\\\Device\\\\HarddiskVolume.))\\\\%s" % (re.escape(node["value"][3:]),) - del(node["value"]) - - return node - -# We support many different log sources so we keep different mapping depending -# on the log source and category. -# The mapping key is product/category/service. -# The mapping value is tuple like: -# - top-level parameters -# - pre-condition is a D&R rule node filtering relevant events. -# - field mappings is a dict with a mapping or a callable to convert the field name. -# Individual mapping values can also be callabled(fieldname, value) returning a new fieldname and value. -# - isAllStringValues is a bool indicating whether all values should be converted to string. -# - keywordField is the field name to alias for keywords if supported or None if not. -# - postOpMapper is a callback that can modify an operation once it has been generated. -SigmaLCConfig = namedtuple('SigmaLCConfig', [ - 'topLevelParams', - 'preConditions', - 'fieldMappings', - 'isAllStringValues', - 'keywordField', - 'postOpMapper', -]) -_allFieldMappings = { - "windows/process_creation/": SigmaLCConfig( - topLevelParams = { - "events": [ - "NEW_PROCESS", - "EXISTING_PROCESS", - ] - }, - preConditions = { - "op": "is windows", - }, - fieldMappings = { - "CommandLine": "event/COMMAND_LINE", - "Image": "event/FILE_PATH", - "ParentImage": "event/PARENT/FILE_PATH", - "ParentCommandLine": "event/PARENT/COMMAND_LINE", - "User": "event/USER_NAME", - "OriginalFileName": "event/ORIGINAL_FILE_NAME", - # Custom field names coming from somewhere unknown. - "NewProcessName": "event/FILE_PATH", - "ProcessCommandLine": "event/COMMAND_LINE", - # Another one-off command line. 
- "Command": "event/COMMAND_LINE", - }, - isAllStringValues = False, - keywordField = "event/COMMAND_LINE", - postOpMapper = _mapProcessCreationOperations - ), - "windows//": SigmaLCConfig( - topLevelParams = { - "target": "log", - "log type": "wel", - }, - preConditions = None, - fieldMappings = _windowsEventLogFieldName, - isAllStringValues = True, - keywordField = None, - postOpMapper = None - ), - "windows_defender//": SigmaLCConfig( - topLevelParams = { - "target": "log", - "log type": "wel", - }, - preConditions = None, - fieldMappings = _windowsEventLogFieldName, - isAllStringValues = True, - keywordField = None, - postOpMapper = None - ), - "dns//": SigmaLCConfig( - topLevelParams = { - "event": "DNS_REQUEST", - }, - preConditions = None, - fieldMappings = { - "query": "event/DOMAIN_NAME", - }, - isAllStringValues = False, - keywordField = None, - postOpMapper = None - ), - "linux//": SigmaLCConfig( - topLevelParams = { - "events": [ - "NEW_PROCESS", - "EXISTING_PROCESS", - ] - }, - preConditions = { - "op": "is linux", - }, - fieldMappings = { - "exe": "event/FILE_PATH", - "type": None, - }, - isAllStringValues = False, - keywordField = 'event/COMMAND_LINE', - postOpMapper = None - ), - "unix//": SigmaLCConfig( - topLevelParams = { - "events": [ - "NEW_PROCESS", - "EXISTING_PROCESS", - ] - }, - preConditions = { - "op": "is linux", - }, - fieldMappings = { - "exe": "event/FILE_PATH", - "type": None, - }, - isAllStringValues = False, - keywordField = 'event/COMMAND_LINE', - postOpMapper = None - ), - "netflow//": SigmaLCConfig( - topLevelParams = { - "event": "NETWORK_CONNECTIONS", - }, - preConditions = None, - fieldMappings = { - "destination.port": "event/NETWORK_ACTIVITY/DESTINATION/PORT", - "source.port": "event/NETWORK_ACTIVITY/SOURCE/PORT", - }, - isAllStringValues = False, - keywordField = None, - postOpMapper = None - ), - "/proxy/": SigmaLCConfig( - topLevelParams = { - "event": "HTTP_REQUEST", - }, - preConditions = None, - fieldMappings = { - 
"c-uri|contains": "event/URL", - "c-uri": "event/URL", - "URL": "event/URL", - "cs-uri-query": "event/URL", - "cs-uri-stem": "event/URL", - }, - isAllStringValues = False, - keywordField = None, - postOpMapper = None - ), -} - -class LimaCharlieBackend(BaseBackend): - """Converts Sigma rule into LimaCharlie D&R rules. Contributed by LimaCharlie. https://limacharlie.io""" - identifier = "limacharlie" - active = True - config_required = False - default_config = ["limacharlie"] - - def generate(self, sigmaparser): - # Take the log source information and figure out which set of mappings to use. - ruleConfig = sigmaparser.parsedyaml - ls_rule = ruleConfig['logsource'] - try: - category = ls_rule['category'] - except KeyError: - category = "" - try: - product = ls_rule['product'] - except KeyError: - product = "" - # try: - # service = ls_rule['service'] - # except KeyError: - # service = "" - - # Don't use service for now, most Windows Event Logs - # uses a different service with no category, since we - # treat all Windows Event Logs together we can ignore - # the service. - service = "" - - # See if we have a definition for the source combination. - mappingKey = "%s/%s/%s" % (product, category, service) - topFilter, preCond, mappings, isAllStringValues, keywordField, postOpMapper = _allFieldMappings.get(mappingKey, tuple([None, None, None, None, None, None])) - if mappings is None: - raise NotImplementedError("Log source %s/%s/%s not supported by backend." % (product, category, service)) - - # Field name conversions. - self._fieldMappingInEffect = mappings - - # LC event type pre-selector for the type of data. - self._preCondition = preCond - - # Are all the values treated as strings? - self._isAllStringValues = isAllStringValues - - # Are we supporting keywords full text search? - self._keywordField = keywordField - - # Call to fixup all operations after the fact. - self._postOpMapper = postOpMapper - - # Call the original generation code. 
- detectComponent = super().generate(sigmaparser) - - # We expect a string (yaml) as output, so if - # we get anything else we assume it's a core - # library value and just return it as-is. - if not isinstance( detectComponent, str): - return detectComponent - - # This redundant to deserialize it right after - # generating the yaml, but we try to use the parent - # official class code as much as possible for future - # compatibility. - detectComponent = yaml.safe_load(detectComponent) - - # Check that we got a proper node and not just a string - # which we don't really know what to do with. - if not isinstance(detectComponent, dict): - raise NotImplementedError("Selection combination not supported.") - - # Apply top level filter. - detectComponent.update(topFilter) - - # Now prepare the Response component. - respondComponents = [{ - "action": "report", - "name": ruleConfig["title"], - }] - - # Add a lot of the metadata available to the report. - if ruleConfig.get("tags", None) is not None: - respondComponents[0].setdefault("metadata", {})["tags"] = ruleConfig["tags"] - - if ruleConfig.get("description", None) is not None: - respondComponents[0].setdefault("metadata", {})["description"] = ruleConfig["description"] - - if ruleConfig.get("references", None) is not None: - respondComponents[0].setdefault("metadata", {})["references"] = ruleConfig["references"] - - if ruleConfig.get("level", None) is not None: - respondComponents[0].setdefault("metadata", {})["level"] = ruleConfig["level"] - - if ruleConfig.get("author", None) is not None: - respondComponents[0].setdefault("metadata", {})["author"] = ruleConfig["author"] - - if ruleConfig.get("falsepositives", None) is not None: - respondComponents[0].setdefault("metadata", {})["falsepositives"] = ruleConfig["falsepositives"] - - # Assemble it all as a single, complete D&R rule. 
- return yaml.safe_dump({ - "detect": detectComponent, - "respond": respondComponents, - }, default_flow_style = False) - - def generateQuery(self, parsed): - # We override the generateQuery function because - # we generate proper JSON structures internally - # and only convert to string (yaml) once the - # whole thing is assembled. - result = self.generateNode(parsed.parsedSearch) - - if self._preCondition is not None: - result = { - "op": "and", - "rules": [ - self._preCondition, - result, - ] - } - if self._postOpMapper is not None: - result = self._postOpMapper(result) - return yaml.safe_dump(result) - - def generateANDNode(self, node): - generated = [ self.generateNode(val) for val in node ] - filtered = [ g for g in generated if g is not None ] - if not filtered: - return None - - # Map any possible keywords. - filtered = self._mapKeywordVals(filtered) - - if 1 == len(filtered): - if self._postOpMapper is not None: - filtered[0] = self._postOpMapper(filtered[0]) - return filtered[0] - result = { - "op": "and", - "rules": filtered, - } - if self._postOpMapper is not None: - result = self._postOpMapper(result) - return result - - def generateORNode(self, node): - generated = [self.generateNode(val) for val in node] - filtered = [g for g in generated if g is not None] - if not filtered: - return None - - # Map any possible keywords. 
- filtered = self._mapKeywordVals(filtered) - - if 1 == len(filtered): - if self._postOpMapper is not None: - filtered[0] = self._postOpMapper(filtered[0]) - return filtered[0] - result = { - "op": "or", - "rules": filtered, - } - if self._postOpMapper is not None: - result = self._postOpMapper(result) - return result - - def generateNOTNode(self, node): - generated = self.generateNode(node.item) - if generated is None: - return None - if not isinstance(generated, dict): - raise NotImplementedError("Not operator not available on non-dict nodes.") - generated["not"] = not generated.get("not", False) - return generated - - def generateSubexpressionNode(self, node): - return self.generateNode(node.items) - - def generateListNode(self, node): - return [self.generateNode(value) for value in node] - - def generateMapItemNode(self, node): - fieldname, value = node - - fieldNameAndValCallback = None - - # The mapping can be a dictionary of mapping or a callable - # to get the correct value. - if callable(self._fieldMappingInEffect): - fieldname = self._fieldMappingInEffect(fieldname) - else: - try: - # The mapping can also be a callable that will - # return a mapped key AND value. - if callable(self._fieldMappingInEffect[fieldname]): - fieldNameAndValCallback = self._fieldMappingInEffect[fieldname] - else: - fieldname = self._fieldMappingInEffect[fieldname] - except: - raise NotImplementedError("Field name %s not supported by backend." % (fieldname,)) - - # If fieldname returned is None, it's a special case where we - # ignore the node. 
- if fieldname is None: - return None - - if isinstance(value, (int, str)): - if fieldNameAndValCallback is not None: - fieldname, value = fieldNameAndValCallback(fieldname, value) - op, newVal = self._valuePatternToLcOp(value) - newOp = { - "op": op, - "path": fieldname, - "case sensitive": False, - } - if op == "matches": - newOp["re"] = newVal - else: - newOp["value"] = newVal - if self._postOpMapper is not None: - newOp = self._postOpMapper(newOp) - return newOp - elif isinstance(value, list): - subOps = [] - for v in value: - if fieldNameAndValCallback is not None: - fieldname, v = fieldNameAndValCallback(fieldname, v) - op, newVal = self._valuePatternToLcOp(v) - newOp = { - "op": op, - "path": fieldname, - "case sensitive": False, - } - if op == "matches": - newOp["re"] = newVal - else: - newOp["value"] = newVal - if self._postOpMapper is not None: - newOp = self._postOpMapper(newOp) - subOps.append(newOp) - if 1 == len(subOps): - return subOps[0] - return { - "op": "or", - "rules": subOps - } - elif isinstance(value, SigmaTypeModifier): - if isinstance(value, SigmaRegularExpressionModifier): - if fieldNameAndValCallback is not None: - fieldname, value = fieldNameAndValCallback(fieldname, value) - result = { - "op": "matches", - "path": fieldname, - "re": re.compile(value), - } - if self._postOpMapper is not None: - result = self._postOpMapper(result) - return result - else: - raise TypeError("Backend does not support TypeModifier: %s" % (str(type(value)))) - elif value is None: - if fieldNameAndValCallback is not None: - fieldname, value = fieldNameAndValCallback(fieldname, value) - result = { - "op": "exists", - "not": True, - "path": fieldname, - } - if self._postOpMapper is not None: - result = self._postOpMapper(result) - return result - else: - raise TypeError("Backend does not support map values of type " + str(type(value))) - - def generateValueNode(self, node): - return node - - def _valuePatternToLcOp(self, val): - # Here we convert the string 
values supported by Sigma that - # can include wildcards into either proper values (string or int) - # or into altered values to be functionally equivalent using - # a few different LC D&R rule operators. - - # No point evaluating non-strings. - if not isinstance(val, str): - return ("is", str(val) if self._isAllStringValues else val) - - # Is there any wildcard in this string? If not, we can short circuit. - if "*" not in val and "?" not in val: - return ("is", val) - - # Now we do a small optimization for the shortcut operators - # available in LC. We try to see if the wildcards are around - # the main value, but NOT within. If that's the case we can - # use the "starts with", "ends with" or "contains" operators. - isStartsWithWildcard = False - isEndsWithWildcard = False - tmpVal = val - if tmpVal.startswith("*"): - isStartsWithWildcard = True - tmpVal = tmpVal[1:] - if tmpVal.endswith("*") and not (tmpVal.endswith("\\*") and not tmpVal.endswith("\\\\*")): - isEndsWithWildcard = True - if tmpVal.endswith("\\\\*"): - # An extra \ had to be there so it didn't escapte the - # *, but since we plan on removing the *, we can also - # remove one \. - tmpVal = tmpVal[:-2] - else: - tmpVal = tmpVal[:-1] - - # Check to see if there are any other wildcards. If there are - # we cannot use our shortcuts. - if "*" not in tmpVal and "?" not in tmpVal: - if isStartsWithWildcard and isEndsWithWildcard: - return ("contains", tmpVal) - - if isStartsWithWildcard: - return ("ends with", tmpVal) - - if isEndsWithWildcard: - return ("starts with", tmpVal) - - # This is messy, but it is accurate in generating a RE based on - # the simplified wildcard system, while also supporting the - # escaping of those wildcards. 
- segments = [] - tmpVal = val - while True: - nEscapes = 0 - for i in range(len(tmpVal)): - # We keep a running count of backslash escape - # characters we see so that if we meet a wildcard - # we can tell whether the wildcard is escaped - # (with odd number of escapes) or if it's just a - # backslash literal before a wildcard (even number). - if "\\" == tmpVal[i]: - nEscapes += 1 - continue - - if "*" == tmpVal[i]: - if 0 == nEscapes: - segments.append(re.escape(tmpVal[:i])) - segments.append(".*") - elif nEscapes % 2 == 0: - segments.append(re.escape(tmpVal[:i - nEscapes])) - segments.append(tmpVal[i - nEscapes:i]) - segments.append(".*") - else: - segments.append(re.escape(tmpVal[:i - nEscapes])) - segments.append(tmpVal[i - nEscapes:i + 1]) - tmpVal = tmpVal[i + 1:] - break - - if "?" == tmpVal[i]: - if 0 == nEscapes: - segments.append(re.escape(tmpVal[:i])) - segments.append(".") - elif nEscapes % 2 == 0: - segments.append(re.escape(tmpVal[:i - nEscapes])) - segments.append(tmpVal[i - nEscapes:i]) - segments.append(".") - else: - segments.append(re.escape(tmpVal[:i - nEscapes])) - segments.append(tmpVal[i - nEscapes:i + 1]) - tmpVal = tmpVal[i + 1:] - break - - nEscapes = 0 - else: - segments.append(re.escape(tmpVal)) - break - - val = ''.join(segments) - - return ("matches", val) - - def _mapKeywordVals(self, values): - # This function ensures that the list of values passed - # are proper D&R operations, if they are strings it indicates - # they were requested as keyword matches. We only support - # keyword matches when specified in the config. We generally just - # map them to the most common field in LC that makes sense. - mapped = [] - - for val in values: - # Non-keywords are just passed through. 
- if not isinstance(val, str): - mapped.append(val) - continue - - if self._keywordField is None: - raise NotImplementedError("Full-text keyboard searches not supported.") - - # This seems to be indicative only of "keywords" which are mostly - # representative of full-text searches. We don't suport that but - # in some data sources we can alias them to an actual field. - op, newVal = self._valuePatternToLcOp(val) - newOp = { - "op": op, - "path": self._keywordField, - } - if op == "matches": - newOp["re"] = newVal - else: - newOp["value"] = newVal - mapped.append(newOp) - - return mapped +# LimaCharlie backend for sigmac created by LimaCharlie.io +# Copyright 2019 Refraction Point, Inc + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. + +# You should have received a copy of the GNU Lesser General Public License +# along with this program. If not, see . + +import re +import yaml +from collections import namedtuple +from .base import BaseBackend +from sigma.parser.modifiers.base import SigmaTypeModifier +from sigma.parser.modifiers.type import SigmaRegularExpressionModifier + +# A few helper functions for cases where field mapping cannot be done +# as easily one by one, or can be done more efficiently. +def _windowsEventLogFieldName(fieldName): + if 'EventID' == fieldName: + return 'Event/System/EventID' + return 'Event/EventData/%s' % (fieldName,) + +def _mapProcessCreationOperations(node): + # Here we fix some common pitfalls found in rules + # in a consistent fashion (already processed to D&R rule). 
+ + # First fixup is looking for a specific path prefix + # based on a specific drive letter. There are many cases + # where the driver letter can change or where the early + # boot process refers to it as "\Device\HarddiskVolume1\". + if ("starts with" == node["op"] and + "event/FILE_PATH" == node["path"] and + node["value"].lower().startswith("c:\\")): + node["op"] = "matches" + node["re"] = "^(?:(?:.:)|(?:\\\\Device\\\\HarddiskVolume.))\\\\%s" % (re.escape(node["value"][3:]),) + del(node["value"]) + + return node + +# We support many different log sources so we keep different mapping depending +# on the log source and category. +# The mapping key is product/category/service. +# The mapping value is tuple like: +# - top-level parameters +# - pre-condition is a D&R rule node filtering relevant events. +# - field mappings is a dict with a mapping or a callable to convert the field name. +# Individual mapping values can also be callabled(fieldname, value) returning a new fieldname and value. +# - isAllStringValues is a bool indicating whether all values should be converted to string. +# - keywordField is the field name to alias for keywords if supported or None if not. +# - postOpMapper is a callback that can modify an operation once it has been generated. +SigmaLCConfig = namedtuple('SigmaLCConfig', [ + 'topLevelParams', + 'preConditions', + 'fieldMappings', + 'isAllStringValues', + 'keywordField', + 'postOpMapper', +]) +_allFieldMappings = { + "windows/process_creation/": SigmaLCConfig( + topLevelParams = { + "events": [ + "NEW_PROCESS", + "EXISTING_PROCESS", + ] + }, + preConditions = { + "op": "is windows", + }, + fieldMappings = { + "CommandLine": "event/COMMAND_LINE", + "Image": "event/FILE_PATH", + "ParentImage": "event/PARENT/FILE_PATH", + "ParentCommandLine": "event/PARENT/COMMAND_LINE", + "User": "event/USER_NAME", + "OriginalFileName": "event/ORIGINAL_FILE_NAME", + # Custom field names coming from somewhere unknown. 
+ "NewProcessName": "event/FILE_PATH", + "ProcessCommandLine": "event/COMMAND_LINE", + # Another one-off command line. + "Command": "event/COMMAND_LINE", + }, + isAllStringValues = False, + keywordField = "event/COMMAND_LINE", + postOpMapper = _mapProcessCreationOperations + ), + "windows//": SigmaLCConfig( + topLevelParams = { + "target": "log", + "log type": "wel", + }, + preConditions = None, + fieldMappings = _windowsEventLogFieldName, + isAllStringValues = True, + keywordField = None, + postOpMapper = None + ), + "windows_defender//": SigmaLCConfig( + topLevelParams = { + "target": "log", + "log type": "wel", + }, + preConditions = None, + fieldMappings = _windowsEventLogFieldName, + isAllStringValues = True, + keywordField = None, + postOpMapper = None + ), + "dns//": SigmaLCConfig( + topLevelParams = { + "event": "DNS_REQUEST", + }, + preConditions = None, + fieldMappings = { + "query": "event/DOMAIN_NAME", + }, + isAllStringValues = False, + keywordField = None, + postOpMapper = None + ), + "linux//": SigmaLCConfig( + topLevelParams = { + "events": [ + "NEW_PROCESS", + "EXISTING_PROCESS", + ] + }, + preConditions = { + "op": "is linux", + }, + fieldMappings = { + "exe": "event/FILE_PATH", + "type": None, + }, + isAllStringValues = False, + keywordField = 'event/COMMAND_LINE', + postOpMapper = None + ), + "unix//": SigmaLCConfig( + topLevelParams = { + "events": [ + "NEW_PROCESS", + "EXISTING_PROCESS", + ] + }, + preConditions = { + "op": "is linux", + }, + fieldMappings = { + "exe": "event/FILE_PATH", + "type": None, + }, + isAllStringValues = False, + keywordField = 'event/COMMAND_LINE', + postOpMapper = None + ), + "netflow//": SigmaLCConfig( + topLevelParams = { + "event": "NETWORK_CONNECTIONS", + }, + preConditions = None, + fieldMappings = { + "destination.port": "event/NETWORK_ACTIVITY/DESTINATION/PORT", + "source.port": "event/NETWORK_ACTIVITY/SOURCE/PORT", + }, + isAllStringValues = False, + keywordField = None, + postOpMapper = None + ), + 
"/proxy/": SigmaLCConfig( + topLevelParams = { + "event": "HTTP_REQUEST", + }, + preConditions = None, + fieldMappings = { + "c-uri|contains": "event/URL", + "c-uri": "event/URL", + "URL": "event/URL", + "cs-uri-query": "event/URL", + "cs-uri-stem": "event/URL", + }, + isAllStringValues = False, + keywordField = None, + postOpMapper = None + ), +} + +class LimaCharlieBackend(BaseBackend): + """Converts Sigma rule into LimaCharlie D&R rules. Contributed by LimaCharlie. https://limacharlie.io""" + identifier = "limacharlie" + active = True + config_required = False + default_config = ["limacharlie"] + + def generate(self, sigmaparser): + # Take the log source information and figure out which set of mappings to use. + ruleConfig = sigmaparser.parsedyaml + ls_rule = ruleConfig['logsource'] + try: + category = ls_rule['category'] + except KeyError: + category = "" + try: + product = ls_rule['product'] + except KeyError: + product = "" + # try: + # service = ls_rule['service'] + # except KeyError: + # service = "" + + # If there is a timeframe component, we do not currently + # support it for now. + if ruleConfig.get( 'detection', {} ).get( 'timeframe', None ) is not None: + raise NotImplementedError("Timeframes are not supported by backend.") + + # Don't use service for now, most Windows Event Logs + # uses a different service with no category, since we + # treat all Windows Event Logs together we can ignore + # the service. + service = "" + + # See if we have a definition for the source combination. + mappingKey = "%s/%s/%s" % (product, category, service) + topFilter, preCond, mappings, isAllStringValues, keywordField, postOpMapper = _allFieldMappings.get(mappingKey, tuple([None, None, None, None, None, None])) + if mappings is None: + raise NotImplementedError("Log source %s/%s/%s not supported by backend." % (product, category, service)) + + # Field name conversions. + self._fieldMappingInEffect = mappings + + # LC event type pre-selector for the type of data. 
+ self._preCondition = preCond + + # Are all the values treated as strings? + self._isAllStringValues = isAllStringValues + + # Are we supporting keywords full text search? + self._keywordField = keywordField + + # Call to fixup all operations after the fact. + self._postOpMapper = postOpMapper + + # Call the original generation code. + detectComponent = super().generate(sigmaparser) + + # We expect a string (yaml) as output, so if + # we get anything else we assume it's a core + # library value and just return it as-is. + if not isinstance( detectComponent, str): + return detectComponent + + # This redundant to deserialize it right after + # generating the yaml, but we try to use the parent + # official class code as much as possible for future + # compatibility. + detectComponent = yaml.safe_load(detectComponent) + + # Check that we got a proper node and not just a string + # which we don't really know what to do with. + if not isinstance(detectComponent, dict): + raise NotImplementedError("Selection combination not supported.") + + # Apply top level filter. + detectComponent.update(topFilter) + + # Now prepare the Response component. + respondComponents = [{ + "action": "report", + "name": ruleConfig["title"], + }] + + # Add a lot of the metadata available to the report. 
+ if ruleConfig.get("tags", None) is not None: + respondComponents[0].setdefault("metadata", {})["tags"] = ruleConfig["tags"] + + if ruleConfig.get("description", None) is not None: + respondComponents[0].setdefault("metadata", {})["description"] = ruleConfig["description"] + + if ruleConfig.get("references", None) is not None: + respondComponents[0].setdefault("metadata", {})["references"] = ruleConfig["references"] + + if ruleConfig.get("level", None) is not None: + respondComponents[0].setdefault("metadata", {})["level"] = ruleConfig["level"] + + if ruleConfig.get("author", None) is not None: + respondComponents[0].setdefault("metadata", {})["author"] = ruleConfig["author"] + + if ruleConfig.get("falsepositives", None) is not None: + respondComponents[0].setdefault("metadata", {})["falsepositives"] = ruleConfig["falsepositives"] + + # Assemble it all as a single, complete D&R rule. + return yaml.safe_dump({ + "detect": detectComponent, + "respond": respondComponents, + }, default_flow_style = False) + + def generateQuery(self, parsed): + # We override the generateQuery function because + # we generate proper JSON structures internally + # and only convert to string (yaml) once the + # whole thing is assembled. + result = self.generateNode(parsed.parsedSearch) + + if self._preCondition is not None: + result = { + "op": "and", + "rules": [ + self._preCondition, + result, + ] + } + if self._postOpMapper is not None: + result = self._postOpMapper(result) + return yaml.safe_dump(result) + + def generateANDNode(self, node): + generated = [ self.generateNode(val) for val in node ] + filtered = [ g for g in generated if g is not None ] + if not filtered: + return None + + # Map any possible keywords. 
+ filtered = self._mapKeywordVals(filtered) + + if 1 == len(filtered): + if self._postOpMapper is not None: + filtered[0] = self._postOpMapper(filtered[0]) + return filtered[0] + result = { + "op": "and", + "rules": filtered, + } + if self._postOpMapper is not None: + result = self._postOpMapper(result) + return result + + def generateORNode(self, node): + generated = [self.generateNode(val) for val in node] + filtered = [g for g in generated if g is not None] + if not filtered: + return None + + # Map any possible keywords. + filtered = self._mapKeywordVals(filtered) + + if 1 == len(filtered): + if self._postOpMapper is not None: + filtered[0] = self._postOpMapper(filtered[0]) + return filtered[0] + result = { + "op": "or", + "rules": filtered, + } + if self._postOpMapper is not None: + result = self._postOpMapper(result) + return result + + def generateNOTNode(self, node): + generated = self.generateNode(node.item) + if generated is None: + return None + if not isinstance(generated, dict): + raise NotImplementedError("Not operator not available on non-dict nodes.") + generated["not"] = not generated.get("not", False) + return generated + + def generateSubexpressionNode(self, node): + return self.generateNode(node.items) + + def generateListNode(self, node): + return [self.generateNode(value) for value in node] + + def generateMapItemNode(self, node): + fieldname, value = node + + fieldNameAndValCallback = None + + # The mapping can be a dictionary of mapping or a callable + # to get the correct value. + if callable(self._fieldMappingInEffect): + fieldname = self._fieldMappingInEffect(fieldname) + else: + try: + # The mapping can also be a callable that will + # return a mapped key AND value. + if callable(self._fieldMappingInEffect[fieldname]): + fieldNameAndValCallback = self._fieldMappingInEffect[fieldname] + else: + fieldname = self._fieldMappingInEffect[fieldname] + except: + raise NotImplementedError("Field name %s not supported by backend." 
% (fieldname,)) + + # If fieldname returned is None, it's a special case where we + # ignore the node. + if fieldname is None: + return None + + if isinstance(value, (int, str)): + if fieldNameAndValCallback is not None: + fieldname, value = fieldNameAndValCallback(fieldname, value) + op, newVal = self._valuePatternToLcOp(value) + newOp = { + "op": op, + "path": fieldname, + "case sensitive": False, + } + if op == "matches": + newOp["re"] = newVal + else: + newOp["value"] = newVal + if self._postOpMapper is not None: + newOp = self._postOpMapper(newOp) + return newOp + elif isinstance(value, list): + subOps = [] + for v in value: + if fieldNameAndValCallback is not None: + fieldname, v = fieldNameAndValCallback(fieldname, v) + op, newVal = self._valuePatternToLcOp(v) + newOp = { + "op": op, + "path": fieldname, + "case sensitive": False, + } + if op == "matches": + newOp["re"] = newVal + else: + newOp["value"] = newVal + if self._postOpMapper is not None: + newOp = self._postOpMapper(newOp) + subOps.append(newOp) + if 1 == len(subOps): + return subOps[0] + return { + "op": "or", + "rules": subOps + } + elif isinstance(value, SigmaTypeModifier): + if isinstance(value, SigmaRegularExpressionModifier): + if fieldNameAndValCallback is not None: + fieldname, value = fieldNameAndValCallback(fieldname, value) + result = { + "op": "matches", + "path": fieldname, + "re": re.compile(value), + } + if self._postOpMapper is not None: + result = self._postOpMapper(result) + return result + else: + raise TypeError("Backend does not support TypeModifier: %s" % (str(type(value)))) + elif value is None: + if fieldNameAndValCallback is not None: + fieldname, value = fieldNameAndValCallback(fieldname, value) + result = { + "op": "exists", + "not": True, + "path": fieldname, + } + if self._postOpMapper is not None: + result = self._postOpMapper(result) + return result + else: + raise TypeError("Backend does not support map values of type " + str(type(value))) + + def 
generateValueNode(self, node):
+ return node
+
+ def _valuePatternToLcOp(self, val):
+ # Here we convert the string values supported by Sigma that
+ # can include wildcards into either proper values (string or int)
+ # or into altered values to be functionally equivalent using
+ # a few different LC D&R rule operators.
+
+ # No point evaluating non-strings.
+ if not isinstance(val, str):
+ return ("is", str(val) if self._isAllStringValues else val)
+
+ # Is there any wildcard in this string? If not, we can short circuit.
+ if "*" not in val and "?" not in val:
+ return ("is", val)
+
+ # Now we do a small optimization for the shortcut operators
+ # available in LC. We try to see if the wildcards are around
+ # the main value, but NOT within. If that's the case we can
+ # use the "starts with", "ends with" or "contains" operators.
+ isStartsWithWildcard = False
+ isEndsWithWildcard = False
+ tmpVal = val
+ if tmpVal.startswith("*"):
+ isStartsWithWildcard = True
+ tmpVal = tmpVal[1:]
+ if tmpVal.endswith("*") and not (tmpVal.endswith("\\*") and not tmpVal.endswith("\\\\*")):
+ isEndsWithWildcard = True
+ if tmpVal.endswith("\\\\*"):
+ # An extra \ had to be there so it didn't escape the
+ # *, but since we plan on removing the *, we can also
+ # remove one \.
+ tmpVal = tmpVal[:-2]
+ else:
+ tmpVal = tmpVal[:-1]
+
+ # Check to see if there are any other wildcards. If there are
+ # we cannot use our shortcuts.
+ if "*" not in tmpVal and "?" not in tmpVal:
+ if isStartsWithWildcard and isEndsWithWildcard:
+ return ("contains", tmpVal)
+
+ if isStartsWithWildcard:
+ return ("ends with", tmpVal)
+
+ if isEndsWithWildcard:
+ return ("starts with", tmpVal)
+
+ # This is messy, but it is accurate in generating a RE based on
+ # the simplified wildcard system, while also supporting the
+ # escaping of those wildcards. 
+ segments = [] + tmpVal = val + while True: + nEscapes = 0 + for i in range(len(tmpVal)): + # We keep a running count of backslash escape + # characters we see so that if we meet a wildcard + # we can tell whether the wildcard is escaped + # (with odd number of escapes) or if it's just a + # backslash literal before a wildcard (even number). + if "\\" == tmpVal[i]: + nEscapes += 1 + continue + + if "*" == tmpVal[i]: + if 0 == nEscapes: + segments.append(re.escape(tmpVal[:i])) + segments.append(".*") + elif nEscapes % 2 == 0: + segments.append(re.escape(tmpVal[:i - nEscapes])) + segments.append(tmpVal[i - nEscapes:i]) + segments.append(".*") + else: + segments.append(re.escape(tmpVal[:i - nEscapes])) + segments.append(tmpVal[i - nEscapes:i + 1]) + tmpVal = tmpVal[i + 1:] + break + + if "?" == tmpVal[i]: + if 0 == nEscapes: + segments.append(re.escape(tmpVal[:i])) + segments.append(".") + elif nEscapes % 2 == 0: + segments.append(re.escape(tmpVal[:i - nEscapes])) + segments.append(tmpVal[i - nEscapes:i]) + segments.append(".") + else: + segments.append(re.escape(tmpVal[:i - nEscapes])) + segments.append(tmpVal[i - nEscapes:i + 1]) + tmpVal = tmpVal[i + 1:] + break + + nEscapes = 0 + else: + segments.append(re.escape(tmpVal)) + break + + val = ''.join(segments) + + return ("matches", val) + + def _mapKeywordVals(self, values): + # This function ensures that the list of values passed + # are proper D&R operations, if they are strings it indicates + # they were requested as keyword matches. We only support + # keyword matches when specified in the config. We generally just + # map them to the most common field in LC that makes sense. + mapped = [] + + for val in values: + # Non-keywords are just passed through. 
+ if not isinstance(val, str):
+ mapped.append(val)
+ continue
+
+ if self._keywordField is None:
+ raise NotImplementedError("Full-text keyword searches not supported.")
+
+ # This seems to be indicative only of "keywords" which are mostly
+ # representative of full-text searches. We don't support that but
+ # in some data sources we can alias them to an actual field.
+ op, newVal = self._valuePatternToLcOp(val)
+ newOp = {
+ "op": op,
+ "path": self._keywordField,
+ }
+ if op == "matches":
+ newOp["re"] = newVal
+ else:
+ newOp["value"] = newVal
+ mapped.append(newOp)
+
+ return mapped
diff --git a/tools/sigma/backends/wdatp.py b/tools/sigma/backends/mdatp.py
similarity index 87%
rename from tools/sigma/backends/wdatp.py
rename to tools/sigma/backends/mdatp.py
index 92f46331d..f31c5c01e 100644
--- a/tools/sigma/backends/wdatp.py
+++ b/tools/sigma/backends/mdatp.py
@@ -15,12 +15,28 @@
 # along with this program. If not, see .
 import re
+from functools import wraps
 from .base import SingleTextQueryBackend
 from .exceptions import NotSupportedError
+
+
+def wrapper(method):
+ @wraps(method)
+ def _impl(self, method_args):
+ key, value, *_ = method_args
+ if '.keyword' in key:
+ key = key.split('.keyword')[0]
+ if key not in self.skip_fields:
+ method_output = method(self, method_args)
+ return method_output
+ else:
+ return
+ return _impl
+
+
 class WindowsDefenderATPBackend(SingleTextQueryBackend):
- """Converts Sigma rule into Windows Defender ATP Hunting Queries."""
- identifier = "wdatp"
+ """Converts Sigma rule into Microsoft Defender ATP Hunting Queries."""
+ identifier = "mdatp"
 active = True
 config_required = False
@@ -41,6 +57,16 @@ class WindowsDefenderATPBackend(SingleTextQueryBackend):
 mapExpression = "%s == %s"
 mapListsSpecialHandling = True
 mapListValueExpression = "%s in %s"
+
+ skip_fields = {
+ "Description",
+ "_exists_",
+ "FileVersion",
+ "Product",
+ "Company",
+ "ParentProcessName",
+ "ParentCommandLine"
+ }
 def __init__(self, *args, **kwargs): 
"""Initialize field mappings""" @@ -52,11 +78,12 @@ class WindowsDefenderATPBackend(SingleTextQueryBackend): # (replacement, ): Replaces field occurrence with static string "AccountName" : (self.id_mapping, self.default_value_mapping), "CommandLine" : ("ProcessCommandLine", self.default_value_mapping), - "ComputerName" : (self.id_mapping, self.default_value_mapping), + "DeviceName" : (self.id_mapping, self.default_value_mapping), "DestinationHostname" : ("RemoteUrl", self.default_value_mapping), "DestinationIp" : ("RemoteIP", self.default_value_mapping), "DestinationIsIpv6" : ("RemoteIP has \":\"", ), "DestinationPort" : ("RemotePort", self.default_value_mapping), + "Protocol" : ("RemoteProtocol", self.default_value_mapping), "Details" : ("RegistryValueData", self.default_value_mapping), "EventType" : ("ActionType", self.default_value_mapping), "Image" : ("FolderPath", self.default_value_mapping), @@ -137,20 +164,21 @@ class WindowsDefenderATPBackend(SingleTextQueryBackend): self.service = None if (self.category, self.product, self.service) == ("process_creation", "windows", None): - self.table = "ProcessCreationEvents" + self.table = "DeviceProcessEvents" elif (self.category, self.product, self.service) == (None, "windows", "powershell"): - self.table = "MiscEvents" + self.table = "DeviceEvents" self.orToken = ", " return super().generate(sigmaparser) def generateBefore(self, parsed): if self.table is None: - raise NotSupportedError("No WDATP table could be determined from Sigma rule") - if self.table == "MiscEvents" and self.service == "powershell": + raise NotSupportedError("No MDATP table could be determined from Sigma rule") + if self.table == "DeviceEvents" and self.service == "powershell": return "%s | where tostring(extractjson('$.Command', AdditionalFields)) in~ " % self.table return "%s | where " % self.table + @wrapper def generateMapItemNode(self, node): """ ATP queries refer to event tables instead of Windows logging event identifiers. 
This method catches conditions that refer to this field @@ -165,26 +193,26 @@ class WindowsDefenderATPBackend(SingleTextQueryBackend): if self.product == "windows": if self.service == "sysmon" and value == 1 \ or self.service == "security" and value == 4688: # Process Execution - self.table = "ProcessCreationEvents" + self.table = "DeviceProcessEvents" return None elif self.service == "sysmon" and value == 3: # Network Connection - self.table = "NetworkCommunicationEvents" + self.table = "DeviceNetworkEvents" return None elif self.service == "sysmon" and value == 7: # Image Load - self.table = "ImageLoadEvents" + self.table = "DeviceImageLoadEvents" return None elif self.service == "sysmon" and value == 8: # Create Remote Thread - self.table = "MiscEvents" + self.table = "DeviceEvents" return "ActionType == \"CreateRemoteThreadApiCall\"" elif self.service == "sysmon" and value == 11: # File Creation - self.table = "FileCreationEvents" + self.table = "DeviceFileEvents" return None elif self.service == "sysmon" and value == 13 \ or self.service == "security" and value == 4657: # Set Registry Value - self.table = "RegistryEvents" + self.table = "DeviceRegistryEvents" return "ActionType == \"RegistryValueSet\"" elif self.service == "security" and value == 4624: - self.table = "LogonEvents" + self.table = "DeviceLogonEvents" return None elif type(value) in (str, int): # default value processing try: diff --git a/tools/sigma/backends/splunk.py b/tools/sigma/backends/splunk.py index 9d95d9eec..614173f1e 100644 --- a/tools/sigma/backends/splunk.py +++ b/tools/sigma/backends/splunk.py @@ -16,7 +16,7 @@ import re import sigma -from .base import SingleTextQueryBackend +from .base import SingleTextQueryBackend, CorelightQueryBackend from .mixins import MultiRuleOutputMixin class SplunkBackend(SingleTextQueryBackend): @@ -72,6 +72,7 @@ class SplunkBackend(SingleTextQueryBackend): def generate(self, sigmaparser): """Method is called for each sigma rule and receives the parsed 
rule (SigmaParser)""" columns = list() + mapped =None try: for field in sigmaparser.parsedyaml["fields"]: mapped = sigmaparser.config.get_fieldmapping(field).resolve_fieldname(field, sigmaparser) @@ -160,7 +161,7 @@ class SplunkXMLBackend(SingleTextQueryBackend, MultiRuleOutputMixin): query = self.generateQuery(parsed) if query is not None: self.queries += self.panel_pre - self.queries += self.getRuleName(sigmaparser) + self.queries += sigmaparser.parsedyaml.get("title") or "" self.queries += self.panel_inf query = query.replace("<", "<") query = query.replace(">", ">") @@ -170,3 +171,43 @@ class SplunkXMLBackend(SingleTextQueryBackend, MultiRuleOutputMixin): def finalize(self): self.queries += self.dash_suf return self.queries + + +class SplunkCorelightBackend(CorelightQueryBackend, SplunkBackend): + identifier = "corelight_splunk" + + + +class CrowdStrikeBackend(SplunkBackend): + """Converts Sigma rule into CrowdStrike Search Processing Language (SPL).""" + identifier = "crowdstrike" + + def generate(self, sigmaparser): + lgs = sigmaparser.parsedyaml.get("logsource") + if lgs.get("product") == "windows" and (lgs.get("service") == "sysmon" or lgs.get("category") == "process_creation"): + fieldmappings = sigmaparser.config.fieldmappings + detections = sigmaparser.definitions + all_fields = dict() + for det in detections.values(): + for field, value in det.items(): + if "|" in field: + field = field.split("|")[0] + if any([item for item in fieldmappings.keys() if field == item]): + if field == "EventID" and str(value) == str(1) and lgs.get("service") == "sysmon": + all_fields.update(det) + elif field != "EventID": + all_fields.update(det) + else: + raise NotImplementedError("Not supported fields!") + else: + raise NotImplementedError("Not supported fields!") + + table_fields = sigmaparser.parsedyaml.get("fields", []) + res_table_fields = [] + for fl in table_fields: + if fl in fieldmappings.keys(): + res_table_fields.append(fl) + sigmaparser.parsedyaml["fields"] = 
res_table_fields + return super().generate(sigmaparser) + else: + raise NotImplementedError("Not supported logsources!") \ No newline at end of file From e8b956f575c0af942d20ee3a6b951f52c04d3baa Mon Sep 17 00:00:00 2001 From: vh Date: Wed, 20 May 2020 12:35:00 +0300 Subject: [PATCH 2/5] Updated config --- tools/config/arcsight-zeek.yml | 135 ++++- tools/config/arcsight.yml | 130 ++++- tools/config/ecs-proxy.yml | 49 +- tools/config/ecs-zeek-corelight.yml | 356 ++++++++----- tools/config/humio.yml | 536 +++++++++++++++++++- tools/config/logstash-zeek-default-json.yml | 140 ++++- tools/config/qradar.yml | 140 +++-- tools/config/splunk-zeek.yml | 24 +- tools/config/winlogbeat-modules-enabled.yml | 1 - tools/config/winlogbeat-old.yml | 5 +- tools/config/winlogbeat.yml | 22 +- 11 files changed, 1324 insertions(+), 214 deletions(-) diff --git a/tools/config/arcsight-zeek.yml b/tools/config/arcsight-zeek.yml index e27316b45..f902641dd 100644 --- a/tools/config/arcsight-zeek.yml +++ b/tools/config/arcsight-zeek.yml @@ -15,12 +15,14 @@ logsources: service: syslog zeek-category-firewall: category: firewall - conditions: - deviceEventCategory: conn + rewrite: + product: zeek + service: conn zeek-category-dns: category: dns - conditions: - deviceEventCategory: dns + rewrite: + product: zeek + service: dns zeek-category-proxy: category: proxy rewrite: @@ -28,8 +30,6 @@ logsources: service: http zeek-category-webserver: category: webserver - conditions: - deviceEventCategory: http rewrite: product: zeek service: http @@ -321,7 +321,6 @@ fieldmappings: - destinationDnsDomain - destinationHost # All Logs Applied Mapping & Taxonomy - clientip: sourceAddress dst: destinationAddress dst_ip: destinationAddress dst_port: destinationPort @@ -499,7 +498,7 @@ fieldmappings: #service=socks: status_msg: - 'message' - #subject: + subject: - 'message' #service=known_certs: #service=sip: @@ -1050,4 +1049,122 @@ fieldmappings: - sourceAddress san.uri: - requestUrl - - requestUrlQuery \ No 
newline at end of file + - requestUrlQuery + # Few other variations of names from zeek source itself + id_orig_h: sourceAddress + id_orig_p: sourcePort + id_resp_h: destinationAddress + id_resp_p: destinationPort + # Temporary one off rule name fields + cs-uri: requestUrl + destination.domain: + destination.ip: destinationAddress + destination.port: destinationPort + http.response.status_code: deviceSeverity + #http.request.body.content + source.domain: + #sourceAddress: #TONOTE: is arcsight + source.port: sourcePort + agent.version: deviceCustomString2 + c-ip: sourceAddress + clientip: sourceAddress + clientIP: sourceAddress + dest_domain: + - url.domain + dest_ip: destinationAddress + dest_port: destinationPort + #TODO:WhatShouldThisBe?==dest: + #TODO:WhatShouldThisBe?==destination: + #TODO:WhatShouldThisBe?==Destination: + destination.hostname: destinationHostName + #DestinationAddress: #TONOTE: is arcsight + #DestinationHostname: #TONOTE: is arcsight + DestinationIp: destinationAddress + DestinationIP: destinationAddress + DestinationPort: destinationPort + dst-ip: destinationAddress + dstip: destinationAddress + dstport: destinationPort + Host: requestHost + #host: + HostVersion: deviceCustomString2 + http_host: destinationHostName + http_uri: requestUrl + http_url: requestUrl + http_user_agent: + - deviceCustomString5 + - requestClientApplication + http.request.url-query-params: + - requestUrl + - requestUrlQuery + HttpMethod: requestMethod + in_url: requestUrl + #parent_domain: + # - url.registered_domain + # - destination.registered_domain + post_url_parameter: requestUrl + Request Url: requestUrl + request_url: requestUrl + request_URL: requestUrl + RequestUrl: requestUrl + #response: http.response.status_code + resource.url: requestUrl + resource.URL: requestUrl + sc_status: deviceSeverity + sender_domain: message + service.response_code: deviceSeverity + SourceAddr: sourceAddress + SourceAddress: sourceAddress + SourceIP: sourceAddress + SourceIp: 
sourceAddress + SourceNetworkAddress: + - source.address + - sourceAddress + SourcePort: sourcePort + srcip: sourceAddress + Status: deviceSeverity + #status: deviceSeverity + url: requestUrl + URL: requestUrl + url_query: + - requestUrl + - requestUrlQuery + url.query: + - requestUrl + - requestUrlQuery + uri_path: requestUrl + #user_agent: user_agent.original + user_agent.name: + - deviceCustomString5 + - requestClientApplication + user-agent: + - deviceCustomString5 + - requestClientApplication + User-Agent: + - deviceCustomString5 + - requestClientApplication + useragent: + - deviceCustomString5 + - requestClientApplication + UserAgent: + - deviceCustomString5 + - requestClientApplication + User Agent: + - deviceCustomString5 + - requestClientApplication + web_dest: destinationHostName + web.dest: destinationHostName + Web.dest: destinationHostName + web.host: destinationHostName + Web.host: destinationHostName + web_method: requestMethod + Web_method: requestMethod + web.method: requestMethod + Web.method: requestMethod + web_src: sourceAddress + web_status: deviceSeverity + Web_status: deviceSeverity + web.status: deviceSeverity + Web.status: deviceSeverity + web_uri: requestUrl + web_url: requestUrl \ No newline at end of file diff --git a/tools/config/arcsight.yml b/tools/config/arcsight.yml index f6a9bc537..d9dd1d7b7 100644 --- a/tools/config/arcsight.yml +++ b/tools/config/arcsight.yml @@ -349,4 +349,132 @@ fieldmappings: keywords: - deviceCustomString1 ScriptBlockText: - - deviceCustomString1 \ No newline at end of file + - deviceCustomString1 + AccessMask: deviceCustomString1 + AccountName: deviceCustomString1 + AllowedToDelegateTo: deviceCustomString1 + AttributeLDAPDisplayName: deviceCustomString1 + AuditPolicyChanges: deviceCustomString1 + AuthenticationPackageName: deviceCustomString1 + CallingProcessName: deviceCustomString1 + Command: deviceCustomString1 + Command_Line: deviceCustomString1 + ComputerName: deviceCustomString1 + destination.domain: 
deviceCustomString1 + DestinationIP: deviceCustomString1 + EngineVersion: deviceCustomString1 + Event: deviceCustomString1 + event.category: deviceCustomString1 + event.raw: deviceCustomString1 + event_data.AccessMask: deviceCustomString1 + event_data.AccountName: deviceCustomString1 + event_data.AllowedToDelegateTo: deviceCustomString1 + event_data.AttributeLDAPDisplayName: deviceCustomString1 + event_data.AuditPolicyChanges: deviceCustomString1 + event_data.AuthenticationPackageName: deviceCustomString1 + event_data.CallingProcessName: deviceCustomString1 + event_data.CallTrace: deviceCustomString1 + event_data.CommandLine: deviceCustomString1 + event_data.ComputerName: deviceCustomString1 + event_data.CurrentDirectory: deviceCustomString1 + event_data.Description: deviceCustomString1 + event_data.DestinationHostname: deviceCustomString1 + event_data.DestinationIp: deviceCustomString1 + event_data.DestinationIsIpv6: deviceCustomString1 + event_data.DestinationPort: deviceCustomString1 + event_data.Details: deviceCustomString1 + event_data.EngineVersion: deviceCustomString1 + event_data.EventType: deviceCustomString1 + event_data.FailureCode: deviceCustomString1 + event_data.FileName: deviceCustomString1 + event_data.GrantedAccess: deviceCustomString1 + event_data.GroupName: deviceCustomString1 + event_data.GroupSid: deviceCustomString1 + event_data.Hashes: deviceCustomString1 + event_data.HiveName: deviceCustomString1 + event_data.HostVersion: deviceCustomString1 + event_data.Image: deviceCustomString1 + event_data.ImageLoaded: deviceCustomString1 + event_data.ImagePath: deviceCustomString1 + event_data.Imphash: deviceCustomString1 + event_data.IpAddress: deviceCustomString1 + event_data.KeyLength: deviceCustomString1 + event_data.LogonProcessName: deviceCustomString1 + event_data.LogonType: deviceCustomString1 + event_data.NewProcessName: deviceCustomString1 + event_data.ObjectClass: deviceCustomString1 + event_data.ObjectName: deviceCustomString1 + 
event_data.ObjectType: deviceCustomString1 + event_data.ObjectValueName: deviceCustomString1 + event_data.ParentCommandLine: deviceCustomString1 + event_data.ParentImage: deviceCustomString1 + event_data.ParentProcessName: deviceCustomString1 + event_data.Path: deviceCustomString1 + event_data.PipeName: deviceCustomString1 + event_data.ProcessCommandLine: deviceCustomString1 + event_data.ProcessName: deviceCustomString1 + event_data.Properties: deviceCustomString1 + event_data.SecurityID: deviceCustomString1 + event_data.ServiceFileName: deviceCustomString1 + event_data.ServiceName: deviceCustomString1 + event_data.ShareName: deviceCustomString1 + event_data.Signature: deviceCustomString1 + event_data.Source: deviceCustomString1 + event_data.SourceImage: deviceCustomString1 + event_data.StartModule: deviceCustomString1 + event_data.Status: deviceCustomString1 + event_data.SubjectUserName: deviceCustomString1 + event_data.SubjectUserSid: deviceCustomString1 + event_data.TargetFilename: deviceCustomString1 + event_data.TargetImage: deviceCustomString1 + event_data.TargetObject: deviceCustomString1 + event_data.TicketEncryptionType: deviceCustomString1 + event_data.TicketOptions: deviceCustomString1 + event_data.User: deviceCustomString1 + event_data.WorkstationName: deviceCustomString1 + FailureCode: deviceCustomString1 + GroupName: deviceCustomString1 + GroupSid: deviceCustomString1 + hashes: deviceCustomString1 + Header.Accept: deviceCustomString1 + HiveName: deviceCustomString1 + host.scan.vuln_name: deviceCustomString1 + HostVersion: deviceCustomString1 + ImagePath: deviceCustomString1 + Imphash: deviceCustomString1 + IpAddress: deviceCustomString1 + IpPort: deviceCustomString1 + KeyLength: deviceCustomString1 + log_name: deviceCustomString1 + LogonType: deviceCustomString1 + NewProcessName: deviceCustomString1 + ObjectClass: deviceCustomString1 + ObjectName: deviceCustomString1 + ObjectType: deviceCustomString1 + ObjectValueName: deviceCustomString1 + 
ParentProcessName: deviceCustomString1 + Path: deviceCustomString1 + ProcessCommandLine: deviceCustomString1 + ProcessName: deviceCustomString1 + Properties: deviceCustomString1 + resource.URL: deviceCustomString1 + SecurityEvent: deviceCustomString1 + SecurityID: deviceCustomString1 + SelectionURL: deviceCustomString1 + ServiceFileName: deviceCustomString1 + ServiceName: deviceCustomString1 + ShareName: deviceCustomString1 + Source: deviceCustomString1 + source_name: deviceCustomString1 + SourceIP: deviceCustomString1 + Status: deviceCustomString1 + SubjectDomainName: deviceCustomString1 + SubjectUserName: deviceCustomString1 + SubjectUserSid: deviceCustomString1 + SysmonEvent: deviceCustomString1 + TargetDomainName: deviceCustomString1 + TargetUserSid: deviceCustomString1 + TicketEncryptionType: deviceCustomString1 + TicketOptions: deviceCustomString1 + winlog.channel: deviceCustomString1 + WorkstationName: deviceCustomString1 \ No newline at end of file diff --git a/tools/config/ecs-proxy.yml b/tools/config/ecs-proxy.yml index 8dd4c4919..b618fc53f 100644 --- a/tools/config/ecs-proxy.yml +++ b/tools/config/ecs-proxy.yml @@ -1,16 +1,44 @@ -title: Elastic Common Schema mapping for proxy logs +title: Elastic Common Schema mapping for proxy and webserver logs including NSM logs (zeek/suricata) order: 20 backends: - es-qs - es-dsl + - elasticsearch-rule - kibana - xpack-watcher - elastalert - elastalert-dsl -logsources: - proxy: - category: proxy - index: filebeat-* +# logsources: + # proxy: + # category: proxy + # index: + # - "filebeat-*" + # - "*ecs-*" + #zeek-category-proxy: + # category: proxy + # rewrite: + # product: zeek + # service: http + #zeek-category-webserver: + # category: webserver + # conditions: + # event.dataset: http + # rewrite: + # product: zeek + # service: http + # zeek-http: + # product: zeek + # service: http + # conditions: + # event.dataset: http + # zeek-http2: + # product: zeek + # service: http2 + # conditions: + # event.dataset: http2 
+defaultindex: + - filebeat-* +# logsourcemerging: or fieldmappings: # All Logs Applied Mapping & Taxonomy dst: @@ -48,6 +76,14 @@ fieldmappings: sc-bytes: http.response.body.bytes sc-status: http.response.status_code # Temporary one off rule name fields + destination.domain: + # destination.ip: + # destination.port: + # http.response.status_code + # http.request.body.content + # source.domain: + # source.ip: + # source.port: agent.version: http.version c-ip: - source.address @@ -65,6 +101,9 @@ fieldmappings: - destination.address - destination.ip dest_port: destination.port + #TODO:WhatShouldThisBe?==dest: + #TODO:WhatShouldThisBe?==destination: + #TODO:WhatShouldThisBe?==Destination: destination.hostname: - destination.domain - url.domain diff --git a/tools/config/ecs-zeek-corelight.yml b/tools/config/ecs-zeek-corelight.yml index 6c3dae8b0..57849671a 100644 --- a/tools/config/ecs-zeek-corelight.yml +++ b/tools/config/ecs-zeek-corelight.yml @@ -26,10 +26,14 @@ logsources: service: syslog zeek-category-firewall: category: firewall - conditions: - event.dataset: conn + rewrite: + product: zeek + service: conn zeek-category-dns: category: dns + rewrite: + product: zeek + service: dns conditions: event.dataset: dns zeek-category-proxy: @@ -39,8 +43,6 @@ logsources: service: http zeek-category-webserver: category: webserver - conditions: - event.dataset: http rewrite: product: zeek service: http @@ -395,151 +397,251 @@ fieldmappings: uid: log.id.uid uids: log.id.uids uuid: log.id.uuid - # Overlapping fields/mappings (aka: shared fields) + # Deep mappings / Overlapping fields/mappings (aka: shared fields) + #_action action: - #- smb.action - - '*.action' - #service=smb_files: smb.action - #service=mqtt: mqtt.action - #service=tunnel: tunnel.action + #- '*.action' + service=mqtt: mqtt.action + service=smb_files: smb.action + service=tunnel: tunnel.action + mqtt_action: smb.action + smb_action: smb.action + tunnel_action: tunnel.action + #_addl addl: - #- weird.addl - - 
'*.addl' - #service=dns: dns.addl - #service=weird: weird.addl + #- '*.addl' + service=dns: dns.addl + service=weird: weird.addl + dns_addl: dns.addl + weird_addl: weird.addl + #_analyzer analyzer: - #- dpd.analyzer - - '*.analyzer' - #service=dpd: dpd.analyzer - #service=files: files.analyzer + #- '*.analyzer' + service=dpd: dpd.analyzer + service=files: files.analyzer + dpd_analyzer: dpd.analyzer + files_analyzer: file.analyzer + #_arg arg: - #- ftp.arg - - '*.arg' - #service=ftp: ftp.arg - #service=ftp: pop3.arg - #service=msqyl: mysql.arg - #auth: - #service=rfb: rfb.auth #RFB does not exist in newer logs, so skipping to cover dns.auth + #- '*.arg' + service=ftp: ftp.arg + service=msqyl: mysql.arg + service=pop3: pop3.arg + ftp_arg: ftp.arg + mysql_arg: mysql.arg + pop3_arg: pop3.arg + #_auth + auth: + #- dns.auth + service=dns: dns.auth + service=rfb: rfb.auth + dns_auth: dns.auth + rfb_auth: rfb.auth + #_cipher cipher: - #- kerberos.cipher - - '*.client' - #service=kerberos: kerberos.cipher - #service=ssl: tls.cipher + #- '*.client' + service=kerberos: kerberos.cipher + service=ssl: tls.cipher + kerberos_cipher: kerberos.cipher + ssl_cipher: tls.cipher + tls_cipher: tls.cipher + #_client client: - #- ssh.client - - '*.client' - #service=kerberos: kerberos.client - #service=ssh: ssh.client + #- '*.client' + service=kerberos: kerberos.client + service=ssh: ssh.client + kerberos_client: kerberos.client + ssh_client: ssh.client + #_command command: - #- ftp.command - - '*.command' - #service=pop3: pop3.command - #service=ftp: ftp.command - #service=irc: irc.command + #- '*.command' + service=irc: irc.command + service=ftp: ftp.command + service=pop3: pop3.command + ftp_command: ftp.command + irc_command: irc.command + pop3_command: pop3.command + #_date date: - #- smtp.date - - '*.date' - #service=sip: sip.date - #service=smtp: smtp.date + #- '*.date' + service=sip: sip.date + service=smtp: smtp.date + sip_date: sip.date + smtp_date: smtp.date + #_duration 
duration: - - event.duration - #- '*.duration' - #service=conn: event.duration - #service=files: files.duration - #service=snmp: event.duration + #- event.duration + service=conn: event.duration + service=files: files.duration + service=snmp: event.duration + conn_duration: event.duration + files_duration: files.duration + snmp_duration: event.duration + #_from from: - #- smtp.from - - '*.from' - #service=kerberos: kerberos.from - #service=smtp: smtp.from + #- '*.from' + service=kerberos: kerberos.from + service=smtp: smtp.from + kerberos_from: kerberos.from + smtp_from: smtp.from + #_is_orig is_orig: - - '*.is_orig' - #service=file: file.is_orig - #service=pop3: pop3.is_orig + #- '*.is_orig' + service=file: file.is_orig + service=pop3: pop3.is_orig + files_is_orig: file.is_orig + pop3_is_orig: pop3.is_orig + #_local_orig local_orig: - - '*.local_orig' - #service=conn conn.local_orig - #service=files file.local_orig + #- '*.local_orig' + service=conn: conn.local_orig + service=files: file.local_orig + conn_local_orig: conn.local_orig + files_local_orig: file.local_orig + #_method method: - - http.request.method - #service=http: http.request.method - #service=sip: sip.method + #- http.request.method + service=http: http.request.method + service=sip: sip.method + http_method: http.request.method + sip_method: sip.method + #_msg msg: - - notice.msg - #service=notice: notice.msg - #service=pop3: pop3.msg + #- notice.msg + service=notice: notice.msg + service=pop3: pop3.msg + notice_msg: notice.msg + pop3_msg: pop3.msg + #_name name: - - file.name - #- '*.name' - #service=smb_files: file.name - #service=software: software.name - #service=weird: weird.name + #- file.name + service=smb_files: file.name + service=software: software.name + service=weird: weird.name + smb_files_name: file.name + software_name: software.name + weird_name: weird.name + #_path path: - - file.path - #- '*.path' - #service=smb_files: file.path - #service=smb_mapping: file.path - #service=smtp: 
smtp.path + #- file.path + service=smb_files: file.path + service=smb_mapping: file.path + service=smtp: smtp.path + smb_files_path: file.path + smb_mapping_path: file.path + smtp_path: smtp.path + #_reply_msg reply_msg: - #- ftp.reply_msg - - '*.reply_msg' - #service=ftp: ftp.reply_msg - #service=radius: radius.reply_msg + #- '*.reply_msg' + service=ftp: ftp.reply_msg + service=radius: radius.reply_msg + ftp_reply_msg: ftp.reply_msg + radius_reply_msg: radius.reply_msg + #_reply_to reply_to: - #- smtp.reply_to - - '*.reply_to' - #service=sip: sip.reply_to - #service=smtp: smtp.reply_to + #- '*.reply_to' + service=sip: sip.reply_to + service=smtp: smtp.reply_to + sip_reply_to: sip.reply_to + smtp_reply_to: smtp.reply_to + #_response_body_len response_body_len: - - http.response.body.bytes - #service=http: http.response.body.bytes - #service=sip: sip.response_body_len + #- http.response.body.bytes + service=http: http.response.body.bytes + service=sip: sip.response_body_len + http_response_body_len: http.response.body.bytes + sip_response_body_len: sip.response_body_len + #_request_body_len request_body_len: - - http.request.body.bytes - #service=http: http.response.body.bytes - #service=sip: sip.request_body_len + #- http.request.body.bytes + service=http: http.request.body.bytes + service=sip: sip.request_body_len + http_request_body_len: http.request.body.bytes + sip_request_body_len: sip.request_body_len + #_rtt + #rtt: + #- event.duration + #- 'zeek.*.rtt' + #service=dns: event.duration + #service=dce_rpc: event.duration + dns_rtt: event.duration + dce_rpc_rtt: event.duration + #_service service: - #- kerberos.service - - '*.service' - #service=kerberos: kerberos.service - #service=smb_mapping: smb.service + #- '*.service' + service=kerberos: kerberos.service + service=smb_mapping: smb.service + kerberos_service: kerberos.service + smb_mapping_service: smb.service + #_status status: - #- socks.status - - '*.status' - #service=pop3: pop3.status - 
#service=mqtt: mqtt.status - #service=socks: socks.status + #- '*.status' + service=mqtt: mqtt.status + service=pop3: pop3.status + service=socks: socks.status + mqtt_status: mqtt.status + pop3_status: pop3.status + socks_status: socks.status + #_status_code status_code: - - 'http.response.status_code' - #service=http: http.response.status_code - #service=sip: sip.status_code + #- 'http.response.status_code' + service=http: http.response.status_code + service=sip: sip.status_code + http_status_code: http.response.status_code + sip_status_code: sip.status_code + #_status_msg status_msg: - - http.status_msg #- '*.status_msg' - #service=http: http.status_msg - #service=sip: sip.status_msg + service=http: http.status_msg + service=sip: sip.status_msg + http_status_msg: http.status_msg + sip_status_msg: sip.status_msg + #_subject subject: - #- smtp.subject - - '*.subject' - #service=known_certs: known_certs.subject - #service=sip: sip.subject - #service=smtp: smtp.subject - #service=ssl: tls.subject + #- '*.subject' + service=known_certs: known_certs.subject + service=sip: sip.subject + service=smtp: smtp.subject + service=ssl: tls.subject + known_certs_subject: known_certs.subject + sip_subject: sip.subject + smtp_subject: smtp.subject + ssl_subject: tls.subject + #_service + + #_trans_depth trans_depth: - #- http.trans_depth - - '*.trans_depth' - #service=http: http.trans_depth - #service=sip: sip.trans_depth - #service=smtp: smtp.trans_depth + #- '*.trans_depth' + service=http: http.trans_depth + service=sip: sip.trans_depth + service=smtp: smtp.trans_depth + http_trans_depth: http.trans_depth + sip_trans_depth: sip.trans_depth + smtp_trans_depth: smtp.trans_depth + #_user_agent + #user_agent: #already normalized + http_user_agent: user_agent.original + gquic_user_agent: user_agent.original + sip_user_agent: user_agent.original + smtp_user_agent: user_agent.original + #_version version: - #- tls.version - - '*.version' - #service=gquic: gquic.version - #service=ntp: 
ntp.version - #service=socks: socks.version - #service=snmp: snmp.version - #service=ssh: ssh.version - #service=tls: tls.version + #- '*.version' + service=gquic: gquic.version + service=http: http.version + service=ntp: ntp.version + service=socks: socks.version + service=snmp: snmp.version + service=ssh: ssh.version + service=tls: tls.version + gquic_version: gquic.version + http_version: http.version + ntp_version: ntp.version + socks_version: socks.version + snmp_version: snmp.version + ssh_version: ssh.version + ssl_version: tls.version + tls_version: tls.version # Conn and Conn Long cache_add_rx_ev: conn.cache_add_rx_ev cache_add_rx_mpg: conn.cache_add_rx_mpg @@ -594,7 +696,7 @@ fieldmappings: # DNS AA: dns.AA #addl: dns.addl - auth: dns.auth + #auth: dns.auth answers: dns.answers.name TTLs: dns.answers.ttl RA: dns.RA @@ -689,6 +791,7 @@ fieldmappings: uri_vars: http.uri_vars #user_agent: user_agent.original #username: source.user.name + #version: http.version # Intel file_mime_type: file.mime_type file_desc: intel.file_desc @@ -1062,6 +1165,11 @@ fieldmappings: san.email: x509.san.email san.ip: x509.san.ip san.uri: x509.san.url + # Few other variations of names from zeek source itself + id_orig_h: source.ip + id_orig_p: source.port + id_resp_h: destination.ip + id_resp_p: destination.port # Temporary one off rule name fields cs-uri: url.original # destination.domain: @@ -1087,7 +1195,7 @@ fieldmappings: destination.hostname: - destination.domain - url.domain - DestinationAddress: + DestinationAddress: destination.ip DestinationHostname: - destination.domain - url.domain @@ -1109,7 +1217,7 @@ fieldmappings: - url.domain http_uri: url.original http_url: url.original - http_user_agent: user_agent.original + #http_user_agent: user_agent.original http.request.url-query-params: url.original HttpMethod: http.request.method in_url: url.original diff --git a/tools/config/humio.yml b/tools/config/humio.yml index a25df0158..dce843f86 100644 --- 
a/tools/config/humio.yml +++ b/tools/config/humio.yml @@ -2,8 +2,368 @@ title: Humio log source conditions order: 20 backends: - humio - +logsources: + zeek: + product: zeek + zeek-category-accounting: + category: accounting + rewrite: + product: zeek + service: syslog + zeek-category-firewall: + category: firewall + rewrite: + product: zeek + service: conn + zeek-category-dns: + category: dns + rewrite: + product: zeek + service: dns + zeek-category-proxy: + category: proxy + rewrite: + product: zeek + service: http + zeek-category-webserver: + category: webserver + rewrite: + product: zeek + service: http + zeek-conn: + product: zeek + service: conn + conditions: + '@stream': conn + zeek-conn_long: + product: zeek + service: conn_long + conditions: + '@stream': conn_long + zeek-dce_rpc: + product: zeek + service: dce_rpc + conditions: + '@stream': dce_rpc + zeek-dns: + product: zeek + service: dns + conditions: + '@stream': dns + zeek-dnp3: + product: zeek + service: dnp3 + conditions: + '@stream': dnp3 + zeek-dpd: + product: zeek + service: dpd + conditions: + '@stream': dpd + zeek-files: + product: zeek + service: files + conditions: + '@stream': files + zeek-ftp: + product: zeek + service: ftp + conditions: + '@stream': ftp + zeek-gquic: + product: zeek + service: gquic + conditions: + '@stream': gquic + zeek-http: + product: zeek + service: http + conditions: + '@stream': http + zeek-http2: + product: zeek + service: http2 + conditions: + '@stream': http2 + zeek-intel: + product: zeek + service: intel + conditions: + '@stream': intel + zeek-irc: + product: zeek + service: irc + conditions: + '@stream': irc + zeek-kerberos: + product: zeek + service: kerberos + conditions: + '@stream': kerberos + zeek-known_certs: + product: zeek + service: known_certs + conditions: + '@stream': known_certs + zeek-known_hosts: + product: zeek + service: known_hosts + conditions: + '@stream': known_hosts + zeek-known_modbus: + product: zeek + service: known_modbus + conditions: 
+ '@stream': known_modbus + zeek-known_services: + product: zeek + service: known_services + conditions: + '@stream': known_services + zeek-modbus: + product: zeek + service: modbus + conditions: + '@stream': modbus + zeek-modbus_register_change: + product: zeek + service: modbus_register_change + conditions: + '@stream': modbus_register_change + zeek-mqtt_connect: + product: zeek + service: mqtt_connect + conditions: + '@stream': mqtt_connect + zeek-mqtt_publish: + product: zeek + service: mqtt_publish + conditions: + '@stream': mqtt_publish + zeek-mqtt_subscribe: + product: zeek + service: mqtt_subscribe + conditions: + '@stream': mqtt_subscribe + zeek-mysql: + product: zeek + service: mysql + conditions: + '@stream': mysql + zeek-notice: + product: zeek + service: notice + conditions: + '@stream': notice + zeek-ntlm: + product: zeek + service: ntlm + conditions: + '@stream': ntlm + zeek-ntp: + product: zeek + service: ntp + conditions: + '@stream': ntp + zeek-ocsp: + product: zeek + service: ocsp + conditions: + '@stream': ocsp + zeek-pe: + product: zeek + service: pe + conditions: + '@stream': pe + zeek-pop3: + product: zeek + service: pop3 + conditions: + '@stream': pop3 + zeek-radius: + product: zeek + service: radius + conditions: + '@stream': radius + zeek-rdp: + product: zeek + service: rdp + conditions: + '@stream': rdp + zeek-rfb: + product: zeek + service: rfb + conditions: + '@stream': rfb + zeek-sip: + product: zeek + service: sip + conditions: + '@stream': sip + zeek-smb_files: + product: zeek + service: smb_files + conditions: + '@stream': smb_files + zeek-smb_mapping: + product: zeek + service: smb_mapping + conditions: + '@stream': smb_mapping + zeek-smtp: + product: zeek + service: smtp + conditions: + '@stream': smtp + zeek-smtp_links: + product: zeek + service: smtp_links + conditions: + '@stream': smtp_links + zeek-snmp: + product: zeek + service: snmp + conditions: + '@stream': snmp + zeek-socks: + product: zeek + service: socks + conditions: 
+ '@stream': socks + zeek-software: + product: zeek + service: software + conditions: + '@stream': software + zeek-ssh: + product: zeek + service: ssh + conditions: + '@stream': ssh + zeek-ssl: + product: zeek + service: ssl + conditions: + '@stream': ssl + zeek-tls: # In case people call it TLS even though orig log is called ssl + product: zeek + service: tls + conditions: + '@stream': ssl + zeek-syslog: + product: zeek + service: syslog + conditions: + '@stream': syslog + zeek-tunnel: + product: zeek + service: tunnel + conditions: + '@stream': tunnel + zeek-traceroute: + product: zeek + service: traceroute + conditions: + '@stream': traceroute + zeek-weird: + product: zeek + service: weird + conditions: + '@stream': weird + zeek-x509: + product: zeek + service: x509 + conditions: + '@stream': x509 + zeek-ip_search: + product: zeek + service: network + conditions: + '@stream': + - conn + - conn_long + - dce_rpc + - dhcp + - dnp3 + - dns + - ftp + - gquic + - http + - irc + - kerberos + - modbus + - mqtt_connect + - mqtt_publish + - mqtt_subscribe + - mysql + - ntlm + - ntp + - radius + - rfb + - sip + - smb_files + - smb_mapping + - smtp + - smtp_links + - snmp + - socks + - ssh + - tls #SSL + - tunnel + - weird fieldmappings: + # Deep mappings Taxonomy for overall/general fields + dst_ip: + product=windows: winlog.event_data.DestinationIp + product=zeek: id.resp_h + src_ip: + product=windows: winlog.event_data.SourceIp + product=zeek: id.orig_h + dst_port: + product=windows: winlog.event_data.DestinationPort + product=zeek: id.resp_p + src_port: + product=windows: winlog.event_data.SourcePort + product=zeek: id.orig_p + network_protocol: + product=zeek: proto + # Deep mappings Taxonomy for DNS Category and DNS service + answer: + product=zeek: answers + #question_length: # product=zeek: # Does not exist in open source version + record_type: + product=zeek: qtype_name + #parent_domain: #product=zeek: # Does not exist in open source version + # Deep mappings 
Taxonomy for HTTP, Webserver category, and Proxy category + cs-bytes: + product=zeek: request_body_len + cs-cookie: + product=zeek: cookie + r-dns: + product=zeek: host + sc-bytes: + product=zeek: response_body_len + sc-status: + product=zeek: status_code + c-uri: + product=zeek: uri + c-uri-extension: + product=zeek: uri + c-uri-query: + product=zeek: uri + c-uri-stem: + product=zeek: uri + c-useragent: + product=zeek: user_agent + cs-host: + product=zeek: host + cs-method: + product=zeek: method + cs-referrer: + product=zeek: referrer + cs-version: + product=zeek: version + # Windows / WEF / Winlogbeat EventID: winlog.event_id Event_ID: winlog.event_id eventId: winlog.event_id @@ -25,10 +385,8 @@ fieldmappings: Description: winlog.event_data.Description DestinationHostname: winlog.event_data.DestinationHostname DestinationIp: winlog.event_data.DestinationIp - dst_ip: winlog.event_data.DestinationIp DestinationIsIpv6: winlog.event_data.DestinationIsIpv6 DestinationPort: winlog.event_data.DestinationPort - dst_port: winlog.event_data.DestinationPort Details: winlog.event_data.Details EngineVersion: winlog.event_data.EngineVersion EventType: winlog.event_data.EventType @@ -69,7 +427,6 @@ fieldmappings: Source: winlog.event_data.Source SourceImage: winlog.event_data.SourceImage SourceIp: winlog.event_data.SourceIp - src_ip: winlog.event_data.SourceIp StartModule: winlog.event_data.StartModule Status: winlog.event_data.Status SubjectUserName: winlog.event_data.SubjectUserName @@ -95,3 +452,174 @@ fieldmappings: PHYType: winlog.event_data.PHYType ProfileName: winlog.event_data.ProfileName SSID: winlog.event_data.SSID + # Zeek Deep Mappings + # Temporary one off rule name fields + agent.version: + product=zeek: version + c-cookie: + product=zeek: cookie + c-ip: + product=zeek: id.orig_h + cs-uri: + product=zeek: uri + clientip: + product=zeek: id.orig_h + clientIP: + product=zeek: id.orig_h + dest_domain: + product=zeek: host + #- query + #- server_name + dest_ip: + 
product=zeek: id.resp_h + dest_port: + product=zeek: id.resp_p + #TODO:WhatShouldThisBe?==dest: + #TODO:WhatShouldThisBe?==destination: + #TODO:WhatShouldThisBe?==Destination: + destination.hostname: + product=zeek: host + #- query + #- server_name + DestinationAddress: + product=zeek: id.resp_h + dst-ip: + product=zeek: id.resp_h + dstip: + product=zeek: id.resp_h + dstport: + product=zeek: id.resp_p + Host: + product=zeek: host + #- query + #- server_name + http_host: + product=zeek: host + #- query + #- server_name + http_uri: + product=zeek: uri + http_url: + product=zeek: uri + http_user_agent: + product=zeek: user_agent + http.request.url-query-params: + product=zeek: uri + HttpMethod: + product=zeek: method + in_url: + product=zeek: uri + post_url_parameter: + product=zeek: uri + Request Url: + product=zeek: uri + request_url: + product=zeek: uri + request_URL: + product=zeek: uri + RequestUrl: + product=zeek: uri + response: + product=zeek: status_code + resource.url: + product=zeek: uri + resource.URL: + product=zeek: uri + sc_status: + product=zeek: status_code + service.response_code: + product=zeek: status_code + source: + product=zeek: id.orig_h + SourceAddr: + product=zeek: id.orig_h + SourceAddress: + product=zeek: id.orig_h + SourceIP: + product=zeek: id.orig_h + SourceNetworkAddress: + product=zeek: id.orig_h + SourcePort: + product=zeek: id.orig_p + srcip: + product=zeek: id.orig_h + status: + product=zeek: status_code + url: + product=zeek: uri + URL: + product=zeek: uri + url_query: + product=zeek: uri + url.query: + product=zeek: uri + uri_path: + product=zeek: uri + user_agent: + product=zeek: user_agent + user_agent.name: + product=zeek: user_agent + user-agent: + product=zeek: user_agent + User-Agent: + product=zeek: user_agent + useragent: + product=zeek: user_agent + UserAgent: + product=zeek: user_agent + User Agent: + product=zeek: user_agent + web_dest: + product=zeek: host + #- query + #- server_name + web.dest: + product=zeek: host + 
#- query + #- server_name + Web.dest: + product=zeek: host + #- query + #- server_name + web.host: + product=zeek: host + #- query + #- server_name + Web.host: + product=zeek: host + #- query + #- server_name + web_method: + product=zeek: method + Web_method: + product=zeek: method + web.method: + product=zeek: method + Web.method: + product=zeek: method + web_src: + product=zeek: id.orig_h + web_status: + product=zeek: status_code + Web_status: + product=zeek: status_code + web.status: + product=zeek: status_code + Web.status: + product=zeek: status_code + web_uri: + product=zeek: uri + web_url: + product=zeek: uri + # Already + destination.ip: + product=zeek: id.resp_h + destination.port: + product=zeek: id.resp_p + http.request.body.content: + product=zeek: post_body + #source.domain: + source.ip: + product=zeek: id.orig_h + source.port: + product=zeek: id.orig_p diff --git a/tools/config/logstash-zeek-default-json.yml b/tools/config/logstash-zeek-default-json.yml index 8c24f4837..df219c60a 100644 --- a/tools/config/logstash-zeek-default-json.yml +++ b/tools/config/logstash-zeek-default-json.yml @@ -19,12 +19,14 @@ logsources: service: syslog zeek-category-firewall: category: firewall - conditions: - '@stream': conn + rewrite: + product: zeek + service: conn zeek-category-dns: category: dns - conditions: - '@stream': dns + rewrite: + product: zeek + service: dns zeek-category-proxy: category: proxy rewrite: @@ -32,8 +34,6 @@ logsources: service: http zeek-category-webserver: category: webserver - conditions: - '@stream': http rewrite: product: zeek service: http @@ -321,7 +321,6 @@ logsources: defaultindex: 'logstash-*' fieldmappings: # All Logs Applied Mapping & Taxonomy - clientip: id.resp_h dst_ip: id.resp_h dst_port: id.resp_p network_protocol: proto @@ -346,4 +345,129 @@ fieldmappings: cs-host: host cs-method: method cs-referrer: referrer - cs-version: version \ No newline at end of file + cs-version: version + # Few other variations of names from zeek 
source itself + id_orig_h: id.orig_h + id_orig_p: id.orig_p + id_resp_h: id.resp_h + id_resp_p: id.resp_p + # Temporary one off rule name fields + agent.version: version + c-cookie: cookie + c-ip: id.orig_h + cs-uri: uri + clientip: id.orig_h + clientIP: id.orig_h + dest_domain: + - query + - host + - server_name + dest_ip: id.resp_h + dest_port: id.resp_p + #TODO:WhatShouldThisBe?==dest: + #TODO:WhatShouldThisBe?==destination: + #TODO:WhatShouldThisBe?==Destination: + destination.hostname: + - query + - host + - server_name + DestinationAddress: id.resp_h + DestinationHostname: + - host + - query + - server_name + DestinationIp: id.resp_h + DestinationIP: id.resp_h + DestinationPort: id.resp_p + dst-ip: id.resp_h + dstip: id.resp_h + dstport: id.resp_p + Host: + - host + - query + - server_name + HostVersion: http.version + http_host: + - host + - query + - server_name + http_uri: uri + http_url: uri + http_user_agent: user_agent + http.request.url-query-params: uri + HttpMethod: method + in_url: uri + # parent_domain: # Not in open source zeek + post_url_parameter: uri + Request Url: uri + request_url: uri + request_URL: uri + RequestUrl: uri + #response: status_code + resource.url: uri + resource.URL: uri + sc_status: status_code + sender_domain: + - query + - server_name + service.response_code: status_code + source: id.orig_h + SourceAddr: id.orig_h + SourceAddress: id.orig_h + SourceIP: id.orig_h + SourceIp: id.orig_h + SourceNetworkAddress: id.orig_h + SourcePort: id.orig_p + srcip: id.orig_h + Status: status_code + status: status_code + url: uri + URL: uri + url_query: uri + url.query: uri + uri_path: uri + user_agent: user_agent + user_agent.name: user_agent + user-agent: user_agent + User-Agent: user_agent + useragent: user_agent + UserAgent: user_agent + User Agent: user_agent + web_dest: + - host + - query + - server_name + web.dest: + - host + - query + - server_name + Web.dest: + - host + - query + - server_name + web.host: + - host + - query + - 
server_name + Web.host: + - host + - query + - server_name + web_method: method + Web_method: method + web.method: method + Web.method: method + web_src: id.orig_h + web_status: status_code + Web_status: status_code + web.status: status_code + Web.status: status_code + web_uri: uri + web_url: uri + # Most are in ECS, but for things not using Elastic - these need renamed + destination.ip: id.resp_h + destination.port: id.resp_p + http.request.body.content: post_body + #source.domain: + source.ip: id.orig_h + source.port: id.orig_p \ No newline at end of file diff --git a/tools/config/qradar.yml b/tools/config/qradar.yml index 1768f96bb..428a73cf7 100644 --- a/tools/config/qradar.yml +++ b/tools/config/qradar.yml @@ -1,52 +1,98 @@ title: QRadar backends: - - qradar + - qradar order: 20 logsources: - apache: - product: apache - conditions: - LOGSOURCETYPENAME(devicetype): ilike '%apache%' - - windows: - product: windows - conditions: - LOGSOURCETYPENAME(devicetype): 'Microsoft Windows Security Event Log' - - qflow: - product: qflow - index: flows - - netflow: - product: netflow - index: flows - - ipfix: - product: ipfix - index: flows - - flow: - category: flow - index: flows - + apache: + product: apache + index: apache + conditions: + LOGSOURCETYPENAME(devicetype): '*apache*' + windows: + product: windows + index: windows + conditions: + LOGSOURCETYPENAME(devicetype): '*Microsoft Windows Security Event Log*' + qflow: + product: qflow + index: flows + netflow: + product: netflow + index: flows + ipfix: + product: ipfix + index: flows + flow: + category: flow + index: flows fieldmappings: - EventID: - - Event ID Code - dst: - - destinationIP - dst_ip: - - destinationIP - src: - - sourceIP - src_ip: - - sourceIP - c-ip: sourceIP - cs-ip: sourceIP - c-uri: url - c-uri-extension: file_extension - c-useragent: user_agent - c-uri-query: uri_query - cs-method: Method - r-dns: FQDN - ClientIP: sourceIP - ServiceFileName: Service Name + event_id: EventID + EventID: EventID + 
dst: destinationip + dst_ip: destinationip + src: sourceip + src_ip: sourceip + c-ip: sourceip + cs-ip: sourceip + c-uri: URL + c-uri-extension: URL + c-useragent: user_agent + c-uri-query: uri_query + cs-method: Method + r-dns: FQDN + ClientIP: sourceip + ServiceFileName: ServiceFileName + event_data.CommandLine: Process CommandLine + CommandLine: Process CommandLine + file_hash: File Hash + hash: File Hash + #Message: search_payload + Event-ID: EventID + Event_ID: EventID + eventId: EventID + event-id: EventID + eventid: EventID + hashes: File Hash + url.query: URL + resource.URL: URL + event_data.CallingProcessName: CallingProcessName + event_data.ComputerName: Hostname/HOSTNAME + ComputerName: Hostname/HOSTNAME + event_data.DestinationHostname: Hostname/HOSTNAME + DestinationHostname: Hostname/HOSTNAME + event_data.DestinationIp: destinationip + event_data.DestinationPort: destinationip + event_data.Details: Target Details + Details: Target Details + event_data.FileName: Filename + event_data.Hashes: File Hash + Hashes: File Hash + event_data.Image: Image + event_data.ImageLoaded: LoadedImage + event_data.ImagePath: SourceImage + ImagePath: Image + event_data.Imphash: IMP Hash + Imphash: IMP Hash + event_data.ParentCommandLine: ParentCommandLine + event_data.ParentImage: ParentImage + event_data.ParentProcessName: ParentImageName + event_data.Path: File Path + Path: File Path + event_data.PipeName: PipeName + event_data.ProcessCommandLine: Process CommandLine + ProcessCommandLine: Process CommandLine + event_data.ServiceFileName: ServiceFileName + event_data.ShareName: ShareName + event_data.Signature: Signature + event_data.SourceImage: SourceImage + event_data.StartModule: StartModule + event_data.SubjectUserName: username + event_data.SubjectUserSid: SubjectUserSid + event_data.TargetFilename: Filename + TargetFilename: Filename + event_data.TargetImage: TargetImage + TargetImage: TargetImage + event_data.TicketOptions: TicketOptions + event_data.User: 
username + User: username + user: username \ No newline at end of file diff --git a/tools/config/splunk-zeek.yml b/tools/config/splunk-zeek.yml index b48626715..fbb6e6790 100644 --- a/tools/config/splunk-zeek.yml +++ b/tools/config/splunk-zeek.yml @@ -12,12 +12,14 @@ logsources: service: syslog zeek-category-firewall: category: firewall - conditions: - sourcetype: 'bro:conn:json' + rewrite: + product: zeek + service: conn zeek-category-dns: category: dns - conditions: - sourcetype: 'bro:dns:json' + rewrite: + product: zeek + service: dns zeek-category-proxy: category: proxy rewrite: @@ -25,16 +27,15 @@ logsources: service: http zeek-category-webserver: category: webserver - conditions: - sourcetype: 'bro:http:json' rewrite: product: zeek service: http zeek-conn: product: zeek service: conn - conditions: - sourcetype: 'bro:conn:json' + rewrite: + product: zeek + service: conn zeek-conn_long: product: zeek service: conn_long @@ -338,6 +339,11 @@ fieldmappings: cs-method: method cs-referrer: referrer cs-version: version + # Few other variations of names from zeek source itself + id_orig_h: id.orig_h + id_orig_p: id.orig_p + id_resp_h: id.resp_h + id_resp_p: id.resp_p # Temporary one off rule name fields agent.version: version c-cookie: cookie @@ -358,7 +364,7 @@ fieldmappings: - query - host - server_name - DestinationAddress: + DestinationAddress: id.resp_h DestinationHostname: - host - query diff --git a/tools/config/winlogbeat-modules-enabled.yml b/tools/config/winlogbeat-modules-enabled.yml index 9f54bbe44..01a63a59e 100644 --- a/tools/config/winlogbeat-modules-enabled.yml +++ b/tools/config/winlogbeat-modules-enabled.yml @@ -93,7 +93,6 @@ fieldmappings: KeyLength: winlog.event_data.KeyLength LogonProcessName: winlog.event_data.LogonProcessName LogonType: winlog.event_data.LogonType - Message: winlog.event_data.Message NewProcessName: winlog.event_data.NewProcessName ObjectClass: winlog.event_data.ObjectClass ObjectName: winlog.event_data.ObjectName diff --git 
a/tools/config/winlogbeat-old.yml b/tools/config/winlogbeat-old.yml index d9d17a6b0..3cfe76bbe 100644 --- a/tools/config/winlogbeat-old.yml +++ b/tools/config/winlogbeat-old.yml @@ -55,9 +55,9 @@ fieldmappings: AuthenticationPackageName: event_data.AuthenticationPackageName CallingProcessName: event_data.CallingProcessName CallTrace: event_data.CallTrace + Channel: winlog.channel CommandLine: event_data.CommandLine - ComputerName: computer_name - ContextInfo: event_data.ContextInfo + ComputerName: event_data.ComputerName CurrentDirectory: event_data.CurrentDirectory Description: event_data.Description DestinationHostname: event_data.DestinationHostname @@ -83,7 +83,6 @@ fieldmappings: KeyLength: event_data.KeyLength LogonProcessName: event_data.LogonProcessName LogonType: event_data.LogonType - Message: event_data.Message NewProcessName: event_data.NewProcessName ObjectClass: event_data.ObjectClass ObjectName: event_data.ObjectName diff --git a/tools/config/winlogbeat.yml b/tools/config/winlogbeat.yml index a5707d2a6..ff5a0d6d4 100644 --- a/tools/config/winlogbeat.yml +++ b/tools/config/winlogbeat.yml @@ -55,15 +55,17 @@ fieldmappings: AuthenticationPackageName: winlog.event_data.AuthenticationPackageName CallingProcessName: winlog.event_data.CallingProcessName CallTrace: winlog.event_data.CallTrace + Channel: winlog.channel CommandLine: winlog.event_data.CommandLine - ComputerName: winlog.computer_name - ContextInfo: winlog.event_data.ContextInfo + ComputerName: winlog.ComputerName CurrentDirectory: winlog.event_data.CurrentDirectory Description: winlog.event_data.Description DestinationHostname: winlog.event_data.DestinationHostname DestinationIp: winlog.event_data.DestinationIp + dst_ip: winlog.event_data.DestinationIp DestinationIsIpv6: winlog.event_data.DestinationIsIpv6 DestinationPort: winlog.event_data.DestinationPort + dst_port: winlog.event_data.DestinationPort Details: winlog.event_data.Details EngineVersion: winlog.event_data.EngineVersion EventType: 
winlog.event_data.EventType @@ -83,7 +85,6 @@ fieldmappings: KeyLength: winlog.event_data.KeyLength LogonProcessName: winlog.event_data.LogonProcessName LogonType: winlog.event_data.LogonType - Message: winlog.event_data.Message NewProcessName: winlog.event_data.NewProcessName ObjectClass: winlog.event_data.ObjectClass ObjectName: winlog.event_data.ObjectName @@ -104,6 +105,8 @@ fieldmappings: Signature: winlog.event_data.Signature Source: winlog.event_data.Source SourceImage: winlog.event_data.SourceImage + SourceIp: winlog.event_data.SourceIp + src_ip: winlog.event_data.SourceIp StartModule: winlog.event_data.StartModule Status: winlog.event_data.Status SubjectUserName: winlog.event_data.SubjectUserName @@ -115,3 +118,16 @@ fieldmappings: TicketOptions: winlog.event_data.TicketOptions User: winlog.event_data.User WorkstationName: winlog.event_data.WorkstationName + # Channel: WLAN-Autoconfig AND EventID: 8001 + AuthenticationAlgorithm: winlog.event_data.AuthenticationAlgorithm + BSSID: winlog.event_data.BSSID + BSSType: winlog.event_data.BSSType + CipherAlgorithm: winlog.event_data.CipherAlgorithm + ConnectionId: winlog.event_data.ConnectionId + ConnectionMode: winlog.event_data.ConnectionMode + InterfaceDescription: winlog.event_data.InterfaceDescription + InterfaceGuid: winlog.event_data.InterfaceGuid + OnexEnabled: winlog.event_data.OnexEnabled + PHYType: winlog.event_data.PHYType + ProfileName: winlog.event_data.ProfileName + SSID: winlog.event_data.SSID From 32e4998c4967d7d09762d994e69cca4ccc143f3c Mon Sep 17 00:00:00 2001 From: Thomas Patzke Date: Sun, 24 May 2020 21:45:37 +0200 Subject: [PATCH 3/5] Removed dead code from ALA backend. 
--- tools/sigma/backends/ala.py | 45 +++---------------------------------- 1 file changed, 3 insertions(+), 42 deletions(-) diff --git a/tools/sigma/backends/ala.py b/tools/sigma/backends/ala.py index e3f1fd9fc..bffd4ebf5 100644 --- a/tools/sigma/backends/ala.py +++ b/tools/sigma/backends/ala.py @@ -111,10 +111,6 @@ class AzureLogAnalyticsBackend(DeepFieldMappingMixin, SingleTextQueryBackend): else: self._field_map = {} - def id_mapping(self, src): - """Identity mapping, source == target field name""" - return src - def map_sysmon_schema(self, eventid): schema_keys = [] try: @@ -154,14 +150,9 @@ class AzureLogAnalyticsBackend(DeepFieldMappingMixin, SingleTextQueryBackend): def generate(self, sigmaparser): self.table = None - try: - self.category = sigmaparser.parsedyaml['logsource'].setdefault('category', None) - self.product = sigmaparser.parsedyaml['logsource'].setdefault('product', None) - self.service = sigmaparser.parsedyaml['logsource'].setdefault('service', None) - except KeyError: - self.category = None - self.product = None - self.service = None + self.category = sigmaparser.parsedyaml['logsource'].setdefault('category', None) + self.product = sigmaparser.parsedyaml['logsource'].setdefault('product', None) + self.service = sigmaparser.parsedyaml['logsource'].setdefault('service', None) detection = sigmaparser.parsedyaml.get("detection", {}) if "keywords" in detection.keys(): @@ -324,36 +315,6 @@ class AzureLogAnalyticsBackend(DeepFieldMappingMixin, SingleTextQueryBackend): ) ) - def generateAfter(self, parsed): - del parsed - if self._fields: - all_fields = list(self._fields) - if self._agg_var: - all_fields = set(all_fields + [self._agg_var]) - project_fields = self._map_fields(all_fields) - project_list = ", ".join(str(fld) for fld in set(project_fields)) - return " | project " + project_list - return "" - - def _map_fields(self, fields): - for field in fields: - mapped_field = self._map_field(field) - if isinstance(mapped_field, str): - yield 
mapped_field - elif isinstance(mapped_field, list): - for subfield in mapped_field: - yield subfield - - def _map_field(self, fieldname): - mapping = self.sigmaconfig.fieldmappings.get(fieldname) - if isinstance(mapping, ConditionalFieldMapping): - fieldname = self._map_conditional_field(fieldname) - elif isinstance(mapping, MultiFieldMapping): - fieldname = mapping.resolve_fieldname(fieldname, self._parser) - elif isinstance(mapping, SimpleFieldMapping): - fieldname = mapping.resolve_fieldname(fieldname, self._parser) - return fieldname - def _map_conditional_field(self, fieldname): mapping = self.sigmaconfig.fieldmappings.get(fieldname) # if there is a conditional mapping for this fieldname From d45f8e19fef854a1779973d9a8ae51714d38257d Mon Sep 17 00:00:00 2001 From: Thomas Patzke Date: Sun, 24 May 2020 21:46:55 +0200 Subject: [PATCH 4/5] Fixes --- Makefile | 3 +++ tools/sigma/backends/base.py | 3 +-- tools/sigma/backends/elasticsearch.py | 7 +++++-- tools/sigma/backends/splunk.py | 25 ++++++++++++++----------- 4 files changed, 23 insertions(+), 15 deletions(-) diff --git a/Makefile b/Makefile index 18a3dbb74..7a2483fe5 100644 --- a/Makefile +++ b/Makefile @@ -32,6 +32,7 @@ test-sigmac: $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t elastalert-dsl -c tools/config/winlogbeat.yml -O alert_methods=http_post,email -O emails=test@test.invalid -O http_post_url=http://test.invalid rules/ > /dev/null $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t ee-outliers -c tools/config/winlogbeat.yml rules/ > /dev/null $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t es-qs -c tools/config/ecs-cloudtrail.yml rules/ > /dev/null + $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t corelight_es-qs -c tools/config/ecs-zeek-corelight.yml rules/ > /dev/null $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t es-rule -c tools/config/ecs-cloudtrail.yml rules/ > /dev/null $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac 
-rvdI -t kibana -c tools/config/ecs-cloudtrail.yml rules/ > /dev/null $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t xpack-watcher -c tools/config/ecs-cloudtrail.yml rules/ > /dev/null @@ -55,6 +56,8 @@ test-sigmac: $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t qualys -c tools/config/qualys.yml rules/ > /dev/null $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t netwitness -c tools/config/netwitness.yml rules/ > /dev/null $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t sumologic -O rulecomment -c tools/config/sumologic.yml rules/ > /dev/null + $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t humio -O rulecomment -c tools/config/humio.yml rules/ > /dev/null + $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t crowdstrike -O rulecomment -c tools/config/crowdstrike.yml rules/ > /dev/null $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t sql -c sysmon rules/ > /dev/null $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t logiq -c sysmon rules/ > /dev/null $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t splunk -c tools/config/splunk-windows-index.yml -f 'level>=high,level<=critical,status=stable,logsource=windows,tag=attack.execution' rules/ > /dev/null diff --git a/tools/sigma/backends/base.py b/tools/sigma/backends/base.py index e4c9239cb..3db2f0e1c 100644 --- a/tools/sigma/backends/base.py +++ b/tools/sigma/backends/base.py @@ -330,7 +330,6 @@ class SingleTextQueryBackend(RulenameCommentMixin, BaseBackend, QuoteCharMixin): return fieldname class CorelightQueryBackend: - def generate(self, sigmaparser): lgs = sigmaparser.parsedyaml.get("logsource") allow_types = { @@ -358,4 +357,4 @@ class CorelightQueryBackend: if allow_types.get(logsource_type) and value.lower() in allow_types.get(logsource_type): return super().generate(sigmaparser) lgs_text = ", ".join(["%s: %s" % (key, lgs.get(key)) for key in lgs.keys()]) - raise 
NotSupportedError("Corelight backend not supported logsources: %s." % lgs_text) \ No newline at end of file + raise NotSupportedError("Corelight backend not supported logsources: %s." % lgs_text) diff --git a/tools/sigma/backends/elasticsearch.py b/tools/sigma/backends/elasticsearch.py index d5b8acca5..7a298c3cf 100644 --- a/tools/sigma/backends/elasticsearch.py +++ b/tools/sigma/backends/elasticsearch.py @@ -23,7 +23,7 @@ from random import randrange import sigma import yaml -from sigma.parser.modifiers.type import SigmaRegularExpressionModifier +from sigma.parser.modifiers.type import SigmaRegularExpressionModifier, SigmaTypeModifier from sigma.parser.condition import ConditionOR, ConditionAND, NodeSubexpression from sigma.config.mapping import ConditionalFieldMapping @@ -119,7 +119,10 @@ class ElasticsearchWildcardHandlingMixin(object): if isinstance(value, list): res = [] for item in value: - res.extend([item.lower(), item.upper()]) + try: + res.extend([item.lower(), item.upper()]) + except AttributeError: # not a string (something that doesn't support upper/lower casing) + res.append(item) value = res elif isinstance(value, str): value = [value.upper(), value.lower()] diff --git a/tools/sigma/backends/splunk.py b/tools/sigma/backends/splunk.py index 614173f1e..3efb3d2b5 100644 --- a/tools/sigma/backends/splunk.py +++ b/tools/sigma/backends/splunk.py @@ -189,18 +189,21 @@ class CrowdStrikeBackend(SplunkBackend): detections = sigmaparser.definitions all_fields = dict() for det in detections.values(): - for field, value in det.items(): - if "|" in field: - field = field.split("|")[0] - if any([item for item in fieldmappings.keys() if field == item]): - if field == "EventID" and str(value) == str(1) and lgs.get("service") == "sysmon": - all_fields.update(det) - elif field != "EventID": - all_fields.update(det) + try: + for field, value in det.items(): + if "|" in field: + field = field.split("|")[0] + if any([item for item in fieldmappings.keys() if field == 
item]): + if field == "EventID" and str(value) == str(1) and lgs.get("service") == "sysmon": + all_fields.update(det) + elif field != "EventID": + all_fields.update(det) + else: + raise NotImplementedError("Not supported fields!") else: raise NotImplementedError("Not supported fields!") - else: - raise NotImplementedError("Not supported fields!") + except AttributeError: # ignore if detection is not a dict + pass table_fields = sigmaparser.parsedyaml.get("fields", []) res_table_fields = [] @@ -210,4 +213,4 @@ class CrowdStrikeBackend(SplunkBackend): sigmaparser.parsedyaml["fields"] = res_table_fields return super().generate(sigmaparser) else: - raise NotImplementedError("Not supported logsources!") \ No newline at end of file + raise NotImplementedError("Not supported logsources!") From daf7ab5ff71c48aec9fab29a70c62a3fec310768 Mon Sep 17 00:00:00 2001 From: Thomas Patzke Date: Sun, 24 May 2020 22:41:38 +0200 Subject: [PATCH 5/5] Cleanup: removal of corelight_* backends --- Makefile | 1 - tools/sigma/backends/base.py | 30 --------------------------- tools/sigma/backends/elasticsearch.py | 19 +---------------- tools/sigma/backends/splunk.py | 8 +------ 4 files changed, 2 insertions(+), 56 deletions(-) diff --git a/Makefile b/Makefile index 7a2483fe5..1d36cd903 100644 --- a/Makefile +++ b/Makefile @@ -32,7 +32,6 @@ test-sigmac: $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t elastalert-dsl -c tools/config/winlogbeat.yml -O alert_methods=http_post,email -O emails=test@test.invalid -O http_post_url=http://test.invalid rules/ > /dev/null $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t ee-outliers -c tools/config/winlogbeat.yml rules/ > /dev/null $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t es-qs -c tools/config/ecs-cloudtrail.yml rules/ > /dev/null - $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t corelight_es-qs -c tools/config/ecs-zeek-corelight.yml rules/ > /dev/null $(COVERAGE) run -a 
--include=$(COVSCOPE) tools/sigmac -rvdI -t es-rule -c tools/config/ecs-cloudtrail.yml rules/ > /dev/null $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t kibana -c tools/config/ecs-cloudtrail.yml rules/ > /dev/null $(COVERAGE) run -a --include=$(COVSCOPE) tools/sigmac -rvdI -t xpack-watcher -c tools/config/ecs-cloudtrail.yml rules/ > /dev/null diff --git a/tools/sigma/backends/base.py b/tools/sigma/backends/base.py index 3db2f0e1c..1ef7e175a 100644 --- a/tools/sigma/backends/base.py +++ b/tools/sigma/backends/base.py @@ -328,33 +328,3 @@ class SingleTextQueryBackend(RulenameCommentMixin, BaseBackend, QuoteCharMixin): transformed from the original name given in the Sigma rule. """ return fieldname - -class CorelightQueryBackend: - def generate(self, sigmaparser): - lgs = sigmaparser.parsedyaml.get("logsource") - allow_types = { - 'category': - [ - 'proxy', 'firewall', 'webserver', 'accounting', 'dns' - ], - 'product': - [ - 'zeek', 'apache', 'netflow', 'firewall' - ], - 'service': [ - 'radius', 'kerberos', 'pe', 'ntlm', 'sip', 'syslog', 'ntp', - 'mqtt_subscribe', 'smb_files', 'irc', 'http2', 'rfb', - 'tunnel', 'socks', 'mqtt_publish', 'network', 'weird', - 'known_certs', 'traceroute', 'modbus', 'smtp_links', - 'ssl', 'known_hosts', 'software', 'smtp', 'tls', 'intel', - 'ssh', 'dce_rpc', 'x509', 'known_services', 'http', 'files', - 'gquic', 'ftp', 'dns', 'conn', 'dnp3', 'rdp', 'dpd', - 'known_modbus', 'conn_long', 'modbus_register_change', - 'mqtt_connect', 'pop3', 'mysql', 'notice', 'snmp', 'smb_mapping' - ] - } - for logsource_type, value in lgs.items(): - if allow_types.get(logsource_type) and value.lower() in allow_types.get(logsource_type): - return super().generate(sigmaparser) - lgs_text = ", ".join(["%s: %s" % (key, lgs.get(key)) for key in lgs.keys()]) - raise NotSupportedError("Corelight backend not supported logsources: %s." 
% lgs_text) diff --git a/tools/sigma/backends/elasticsearch.py b/tools/sigma/backends/elasticsearch.py index 7a298c3cf..88cdd9c6c 100644 --- a/tools/sigma/backends/elasticsearch.py +++ b/tools/sigma/backends/elasticsearch.py @@ -27,7 +27,7 @@ from sigma.parser.modifiers.type import SigmaRegularExpressionModifier, SigmaTyp from sigma.parser.condition import ConditionOR, ConditionAND, NodeSubexpression from sigma.config.mapping import ConditionalFieldMapping -from .base import BaseBackend, SingleTextQueryBackend, CorelightQueryBackend +from .base import BaseBackend, SingleTextQueryBackend from .mixins import RulenameCommentMixin, MultiRuleOutputMixin from .exceptions import NotSupportedError @@ -298,11 +298,6 @@ class ElasticsearchQuerystringBackend(DeepFieldMappingMixin, ElasticsearchWildca else: return super().generateSubexpressionNode(node) - -class ElasticsearchCorelightBackend(CorelightQueryBackend, ElasticsearchQuerystringBackend): - identifier = "corelight_es-qs" - - class ElasticsearchDSLBackend(DeepFieldMappingMixin, RulenameCommentMixin, ElasticsearchWildcardHandlingMixin, BaseBackend): """ElasticSearch DSL backend""" identifier = 'es-dsl' @@ -662,11 +657,6 @@ class KibanaBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin): def index_variable_name(self, index): return "index_" + index.replace("-", "__").replace("*", "X") - -class KibanaCorelightBackend(CorelightQueryBackend, KibanaBackend): - identifier = "corelight_kibana" - - class XPackWatcherBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin): """Converts Sigma Rule into X-Pack Watcher JSON for alerting""" identifier = "xpack-watcher" @@ -973,10 +963,6 @@ class XPackWatcherBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin) raise NotImplementedError("Output type '%s' not supported" % self.output_type) return result -class XPackWatcherCorelightBackend(CorelightQueryBackend, XPackWatcherBackend): - identifier = "corelight_xpack-watcher" - - class 
ElastalertBackend(DeepFieldMappingMixin, MultiRuleOutputMixin): """Elastalert backend""" active = True @@ -1334,6 +1320,3 @@ class ElasticSearchRuleBackend(ElasticsearchQuerystringBackend): if references: rule.update({"references": references}) return json.dumps(rule) - -class ElasticSearchRuleCorelightBackend(CorelightQueryBackend, ElasticSearchRuleBackend): - identifier = "corelight_elasticsearch-rule" diff --git a/tools/sigma/backends/splunk.py b/tools/sigma/backends/splunk.py index 3efb3d2b5..75658343a 100644 --- a/tools/sigma/backends/splunk.py +++ b/tools/sigma/backends/splunk.py @@ -16,7 +16,7 @@ import re import sigma -from .base import SingleTextQueryBackend, CorelightQueryBackend +from .base import SingleTextQueryBackend from .mixins import MultiRuleOutputMixin class SplunkBackend(SingleTextQueryBackend): @@ -172,12 +172,6 @@ class SplunkXMLBackend(SingleTextQueryBackend, MultiRuleOutputMixin): self.queries += self.dash_suf return self.queries - -class SplunkCorelightBackend(CorelightQueryBackend, SplunkBackend): - identifier = "corelight_splunk" - - - class CrowdStrikeBackend(SplunkBackend): """Converts Sigma rule into CrowdStrike Search Processing Language (SPL).""" identifier = "crowdstrike"