2020-06-29 23:17:38 -06:00
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
2021-03-03 22:12:11 -09:00
# or more contributor license agreements. Licensed under the Elastic License
# 2.0; you may not use this file except in compliance with the Elastic License
# 2.0.
2020-06-29 23:17:38 -06:00
""" Packaging and preparation for releases. """
2025-07-01 15:20:55 +02:00
2020-06-29 23:17:38 -06:00
import base64
import hashlib
import json
import shutil
2021-06-15 07:54:50 -06:00
import textwrap
2021-06-16 18:02:47 -06:00
from collections import defaultdict
2025-07-01 15:20:55 +02:00
from datetime import UTC , date , datetime
2021-02-08 20:43:16 -09:00
from pathlib import Path
2025-07-01 15:20:55 +02:00
from typing import Any
2020-06-29 23:17:38 -06:00
import click
2021-03-09 13:30:12 -09:00
import yaml
2025-07-01 15:20:55 +02:00
from semver import Version
2020-06-29 23:17:38 -06:00
2024-08-06 18:07:12 -04:00
from . config import load_current_package_version , parse_rules_config
from . misc import JS_LICENSE , cached
2025-07-01 15:20:55 +02:00
from . navigator import Navigator , NavigatorBuilder
from . rule import QueryRuleData , ThreatMapping , TOMLRule
2024-08-06 18:07:12 -04:00
from . rule_loader import DeprecatedCollection , RuleCollection
2021-05-13 14:27:32 -06:00
from . schemas import definitions
2025-07-01 15:20:55 +02:00
from . utils import Ndjson , get_etc_path , get_path
2024-08-06 18:07:12 -04:00
from . version_lock import loaded_version_lock
2020-06-29 23:17:38 -06:00
2024-08-06 18:07:12 -04:00
# Parsed rules configuration (packages file, version lock settings, etc.).
RULES_CONFIG = parse_rules_config()

# Root directory where release artifacts are written.
RELEASE_DIR = get_path(["releases"])
# Path to the packages.yaml configuration file, as a string.
PACKAGE_FILE = str(RULES_CONFIG.packages_file)
# Repository NOTICE file, embedded into Kibana and Fleet packages.
NOTICE_FILE = get_path(["NOTICE.txt"])
# Security logo shipped with the Fleet registry package.
FLEET_PKG_LOGO = get_etc_path(["security-logo-color-64px.svg"])
2021-03-24 10:24:32 -06:00
2020-06-29 23:17:38 -06:00
2025-07-01 15:20:55 +02:00
def filter_rule(rule: TOMLRule, config_filter: dict[str, Any], exclude_fields: dict[str, Any] | None = None) -> bool:
    """Filter a rule based off metadata and a package configuration.

    Returns True when the rule matches every key in ``config_filter`` (string
    comparisons are case-insensitive) and is not excluded by ``exclude_fields``
    (a mapping of index pattern -> query fields that disqualify a rule).
    """

    def _fold(value: Any) -> Any:
        # Strings compare case-insensitively; all other values compare as-is.
        return value.lower() if isinstance(value, str) else value

    flat_rule = rule.contents.flattened_dict()

    for key, allowed in config_filter.items():
        # A missing key means the rule cannot satisfy the filter.
        if key not in flat_rule:
            return False

        allowed_set = {_fold(v) for v in allowed}

        raw_value = flat_rule[key]
        if isinstance(raw_value, list):
            present: set[Any] = {_fold(v) for v in raw_value}  # type: ignore[reportUnknownVariableType]
        else:
            present = {_fold(raw_value)}

        # Require at least one overlap between the rule's values and the filter's.
        if not (present & allowed_set):
            return False

    if exclude_fields:
        # Local import avoids a circular dependency at module import time.
        from .rule import get_unique_query_fields

        unique_fields = get_unique_query_fields(rule)

        for index, fields in exclude_fields.items():
            index_matches = rule.contents.data.index_or_dataview == index or index == "any"  # type: ignore[reportAttributeAccessIssue] # noqa: PLR1714
            if unique_fields and index_matches and (set(unique_fields) & set(fields)):
                return False

    return True
# Release directory for the currently configured package version.
CURRENT_RELEASE_PATH = RELEASE_DIR / load_current_package_version()
class Package:
    """Packaging object for siem rules and releases.

    Wraps a RuleCollection and produces all release artifacts: per-rule JSON,
    Kibana index/notice files, summary/changelog text, ATT&CK navigator layers,
    the Fleet registry package, and bulk-index ndjson bodies.
    """

    def __init__(  # noqa: PLR0913
        self,
        rules: RuleCollection,
        name: str,
        release: bool | None = False,
        min_version: int | None = None,
        max_version: int | None = None,
        registry_data: dict[str, Any] | None = None,
        generate_navigator: bool = False,
        verbose: bool = True,
        historical: bool = False,
    ) -> None:
        """Initialize a package.

        name is the package/stack version string (e.g. "8.12"); min_version /
        max_version restrict the collection by each rule's saved (locked) version.
        """
        self.name = name
        self.rules = rules
        self.deprecated_rules: DeprecatedCollection = rules.deprecated
        self.release = release
        self.registry_data = registry_data or {}
        self.generate_navigator = generate_navigator
        self.historical = historical

        if min_version is not None:
            self.rules = self.rules.filter(lambda r: min_version <= r.contents.saved_version)  # type: ignore[reportOperatorIssue]

        if max_version is not None:
            self.rules = self.rules.filter(lambda r: max_version >= r.contents.saved_version)  # type: ignore[reportOperatorIssue]

        # Packaging relies on the version lock to classify rules as
        # changed/new/removed, so a bypassed lock is a hard error.
        if RULES_CONFIG.bypass_version_lock:
            raise ValueError("Packaging can not be used when version locking is bypassed.")
        self.changed_ids, self.new_ids, self.removed_ids = loaded_version_lock.manage_versions(
            self.rules,
            verbose=verbose,
            save_changes=False,
        )

    @classmethod
    def load_configs(cls) -> Any:
        """Load configs from packages.yaml."""
        return RULES_CONFIG.packages["package"]

    @staticmethod
    def _package_kibana_notice_file(save_dir: Path) -> None:
        """Convert and save notice file with package (as a notice.ts block comment)."""
        with NOTICE_FILE.open() as f:
            notice_txt = f.read()

        with (save_dir / "notice.ts").open("w") as f:
            # Each NOTICE line becomes a " * " prefixed comment line.
            commented_notice = [f" * {line}".rstrip() for line in notice_txt.splitlines()]
            lines = ["/* eslint-disable @kbn/eslint/require-license-header */", "", "/* @notice"]
            lines = lines + commented_notice + [" */", ""]
            _ = f.write("\n".join(lines))

    def _package_kibana_index_file(self, save_dir: Path) -> None:
        """Convert and save index file with package (index.ts importing every rule JSON)."""
        # Stable ordering: by creation date, then file name.
        sorted_rules = sorted(self.rules, key=lambda k: (k.contents.metadata.creation_date, k.path.name))  # type: ignore[reportOptionalMemberAccess]
        comments = [
            "// Auto generated file from either:",
            "// - scripts/regen_prepackage_rules_index.sh",
            "// - detection-rules repo using CLI command build-release",
            "// Do not hand edit. Run script/command to regenerate package information instead",
        ]
        rule_imports = [
            f"import rule{i} from './{r.path.name + '.json'}';"  # type: ignore[reportOptionalMemberAccess]
            for i, r in enumerate(sorted_rules, 1)
        ]
        const_exports = ["export const rawRules = ["]
        const_exports.extend(f"  rule{i}," for i, _ in enumerate(sorted_rules, 1))
        const_exports.append("];")
        const_exports.append("")

        index_ts = [JS_LICENSE, ""]
        index_ts.extend(comments)
        index_ts.append("")
        index_ts.extend(rule_imports)
        index_ts.append("")
        index_ts.extend(const_exports)

        with (save_dir / "index.ts").open("w") as f:
            _ = f.write("\n".join(index_ts))

    def save_release_files(
        self,
        directory: Path,
        changed_rules: list[definitions.UUIDString],
        new_rules: list[str],
        removed_rules: list[str],
    ) -> None:
        """Release a package: write summary, changelog, consolidated rules, xlsx, and ndjson artifacts."""
        summary, changelog = self.generate_summary_and_changelog(changed_rules, new_rules, removed_rules)

        with (directory / f"{self.name}-summary.txt").open("w") as f:
            _ = f.write(summary)
        with (directory / f"{self.name}-changelog-entry.md").open("w") as f:
            _ = f.write(changelog)

        if self.generate_navigator:
            _ = self.generate_attack_navigator(Path(directory))

        consolidated = json.loads(self.get_consolidated())
        with (directory / f"{self.name}-consolidated-rules.json").open("w") as f:
            json.dump(consolidated, f, sort_keys=True, indent=2)
        consolidated_rules = Ndjson(consolidated)
        consolidated_rules.dump(Path(directory).joinpath(f"{self.name}-consolidated-rules.ndjson"), sort_keys=True)

        self.generate_xslx(str(directory / f"{self.name}-summary.xlsx"))

        bulk_upload, rules_ndjson = self.create_bulk_index_body()
        bulk_upload.dump(
            directory / f"{self.name}-enriched-rules-index-uploadable.ndjson",
            sort_keys=True,
        )
        rules_ndjson.dump(
            directory / f"{self.name}-enriched-rules-index-importable.ndjson",
            sort_keys=True,
        )

    def get_consolidated(self, as_api: bool = True) -> str:
        """Get a consolidated package of the rules in a single file (JSON string)."""
        full_package = [rule.contents.to_api_format() if as_api else rule.contents.to_dict() for rule in self.rules]
        return json.dumps(full_package, sort_keys=True)

    def save(self, verbose: bool = True) -> None:
        """Save a package and all artifacts under RELEASE_DIR/<name>."""
        save_dir = RELEASE_DIR / self.name
        rules_dir = save_dir / "rules"
        extras_dir = save_dir / "extras"

        # remove anything that existed before
        shutil.rmtree(save_dir, ignore_errors=True)
        rules_dir.mkdir(parents=True, exist_ok=True)
        extras_dir.mkdir(parents=True, exist_ok=True)

        for rule in self.rules:
            if not rule.path:
                raise ValueError("Rule path is not found")
            rule.save_json(rules_dir / Path(rule.path.name).with_suffix(".json"))

        self._package_kibana_notice_file(rules_dir)
        self._package_kibana_index_file(rules_dir)

        if self.release:
            self._generate_registry_package(save_dir)
            self.save_release_files(extras_dir, self.changed_ids, self.new_ids, self.removed_ids)

            # zip all rules only and place in extras
            _ = shutil.make_archive(
                str(extras_dir / self.name),
                "zip",
                root_dir=rules_dir.parent,
                base_dir=rules_dir.name,
            )

            # zip everything and place in release root
            _ = shutil.make_archive(
                str(save_dir / f"{self.name}-all"),
                "zip",
                root_dir=extras_dir.parent,
                base_dir=extras_dir.name,
            )

        if verbose:
            click.echo(f"Package saved to: {save_dir}")

    def export(
        self,
        outfile: Path,
        downgrade_version: definitions.SemVer | None = None,
        verbose: bool = True,
        skip_unsupported: bool = False,
    ) -> None:
        """Export rules into a consolidated ndjson file."""
        # Local import avoids a circular dependency with the CLI module.
        from .main import _export_rules  # type: ignore[reportPrivateUsage]

        _export_rules(
            self.rules,
            outfile=outfile,
            downgrade_version=downgrade_version,
            verbose=verbose,
            skip_unsupported=skip_unsupported,
        )

    def get_package_hash(self, as_api: bool = True, verbose: bool = True) -> str:
        """Get hash of package contents (sha256 over the base64-encoded consolidated JSON)."""
        contents = base64.b64encode(self.get_consolidated(as_api=as_api).encode("utf-8"))
        sha256 = hashlib.sha256(contents).hexdigest()
        if verbose:
            click.echo(f"- sha256: {sha256}")
        return sha256

    @classmethod
    def from_config(
        cls,
        rule_collection: RuleCollection | None = None,
        config: dict[str, Any] | None = None,
        verbose: bool = False,
        historical: bool = True,
    ) -> "Package":
        """Load a rules package given a config (remaining config keys are passed to __init__)."""
        all_rules = rule_collection or RuleCollection.default()
        config = config or {}
        exclude_fields = config.pop("exclude_fields", {})
        # deprecated rules are now embedded in the RuleCollection.deprecated - this is left here for backwards compat
        config.pop("log_deprecated", False)
        rule_filter = config.pop("filter", {})

        rules = all_rules.filter(lambda r: filter_rule(r, rule_filter, exclude_fields))

        # add back in deprecated fields
        rules.deprecated = all_rules.deprecated

        if verbose:
            click.echo(f" - {len(all_rules) - len(rules)} rules excluded from package")

        return cls(rules, verbose=verbose, historical=historical, **config)

    def generate_summary_and_changelog(  # noqa: PLR0915
        self,
        changed_rule_ids: list[definitions.UUIDString],
        new_rule_ids: list[str],
        removed_rules: list[str],
    ) -> tuple[str, str]:
        """Generate stats on package.

        Returns (summary_text, changelog_markdown), each grouped by rule
        sub-directory and by changed/added/removed/unchanged status.
        """
        summary: dict[str, dict[str, list[str]]] = {
            "changed": defaultdict(list),
            "added": defaultdict(list),
            "removed": defaultdict(list),
            "unchanged": defaultdict(list),
        }
        changelog: dict[str, dict[str, list[str]]] = {
            "changed": defaultdict(list),
            "added": defaultdict(list),
            "removed": defaultdict(list),
            "unchanged": defaultdict(list),
        }

        # Build an index map first (compact numeric aliases for index patterns).
        longest_name = 0
        indexes: set[str] = set()
        for rule in self.rules:
            longest_name = max(longest_name, len(rule.name))
            index_list = getattr(rule.contents.data, "index", [])
            if index_list:
                indexes.update(index_list)

        index_map = {index: str(i) for i, index in enumerate(sorted(indexes))}

        def get_summary_rule_info(r: TOMLRule) -> str:
            # NOTE(review): the body below reads the enclosing loop variable
            # `rule` rather than the parameter `r`; at every call site r is the
            # loop's `rule`, so behavior is the same — confirm before refactoring.
            contents = r.contents
            rule_str = f"{r.name:<{longest_name}} (v:{contents.autobumped_version} t:{contents.data.type}"
            if isinstance(rule.contents.data, QueryRuleData):
                index: list[str] = rule.contents.data.get("index") or []
                rule_str += f"-{contents.data.language}"  # type: ignore[reportAttributeAccessIssue]
                rule_str += f"(indexes:{''.join(index_map[idx] for idx in index) or 'none'}"

            return rule_str

        def get_markdown_rule_info(r: TOMLRule, sd: str) -> str:
            # lookup the rule in the GitHub tag v{major.minor.patch}
            if not r.path:
                raise ValueError("Unknown rule path")
            data = r.contents.data
            rules_dir_link = f"https://github.com/elastic/detection-rules/tree/v{self.name}/rules/{sd}/"
            rule_type = data.language if isinstance(data, QueryRuleData) else data.type
            return f"`{r.id}` **[{r.name}]({rules_dir_link + r.path.name})** (_{rule_type}_)"

        for rule in self.rules:
            if not rule.path:
                raise ValueError("Unknown rule path")
            sub_dir = rule.path.parent.name

            if rule.id in changed_rule_ids:
                summary["changed"][sub_dir].append(get_summary_rule_info(rule))
                changelog["changed"][sub_dir].append(get_markdown_rule_info(rule, sub_dir))
            elif rule.id in new_rule_ids:
                summary["added"][sub_dir].append(get_summary_rule_info(rule))
                changelog["added"][sub_dir].append(get_markdown_rule_info(rule, sub_dir))
            else:
                summary["unchanged"][sub_dir].append(get_summary_rule_info(rule))
                changelog["unchanged"][sub_dir].append(get_markdown_rule_info(rule, sub_dir))

        for rule in self.deprecated_rules:
            if not rule.path:
                raise ValueError("Unknown rule path")
            sub_dir = rule.path.parent.name
            if not rule.name:
                raise ValueError("Rule name is not found")

            if rule.id in removed_rules:
                summary["removed"][sub_dir].append(rule.name)
                changelog["removed"][sub_dir].append(rule.name)

        def format_summary_rule_str(rule_dict: dict[str, Any]) -> str:
            str_fmt = ""
            for sd, rules in sorted(rule_dict.items(), key=lambda x: x[0]):
                str_fmt += f"\n{sd} ({len(rules)})\n"
                str_fmt += "\n".join(" - " + s for s in sorted(rules))
            return str_fmt or "\nNone"

        def format_changelog_rule_str(rule_dict: dict[str, Any]) -> str:
            str_fmt = ""
            for sd, rules in sorted(rule_dict.items(), key=lambda x: x[0]):
                str_fmt += f"\n- **{sd}** ({len(rules)})\n"
                str_fmt += "\n".join(" - " + s for s in sorted(rules))
            return str_fmt or "\nNone"

        def rule_count(rule_dict: dict[str, Any]) -> int:
            count = 0
            for rules in rule_dict.values():
                count += len(rules)
            return count

        today = str(date.today())  # noqa: DTZ011
        summary_fmt = [
            f"{sf.capitalize()} ({rule_count(summary[sf])}): \n{format_summary_rule_str(summary[sf])}\n"
            for sf in ("added", "changed", "removed", "unchanged")
            if summary[sf]
        ]
        change_fmt = [
            f"{sf.capitalize()} ({rule_count(changelog[sf])}): \n{format_changelog_rule_str(changelog[sf])}\n"
            for sf in ("added", "changed", "removed")
            if changelog[sf]
        ]

        summary_str = "\n".join(
            [
                f"Version {self.name}",
                f"Generated: {today}",
                f"Total Rules: {len(self.rules)}",
                f"Package Hash: {self.get_package_hash(verbose=False)}",
                "---",
                "(v: version, t: rule_type-language)",
                "Index Map:\n{}".format("\n".join(f"{v}: {k}" for k, v in index_map.items())),
                "",
                "Rules",
                *summary_fmt,
            ]
        )
        changelog_str = "\n".join(
            [f"# Version {self.name}", f"_Released {today}_", "", "### Rules", *change_fmt, "", "### CLI"]
        )

        return summary_str, changelog_str

    def generate_attack_navigator(self, path: Path) -> dict[Path, Navigator]:
        """Generate ATT&CK navigator layer files into path/navigator_layers."""
        save_dir = path / "navigator_layers"
        save_dir.mkdir()
        lb = NavigatorBuilder(self.rules.rules)
        return lb.save_all(save_dir, verbose=False)

    def generate_xslx(self, path: str) -> None:
        """Generate a detailed breakdown of a package in an excel file."""
        # Local import avoids a circular dependency at module import time.
        from .docs import PackageDocument

        doc = PackageDocument(path, self)
        doc.populate()
        doc.close()

    def _generate_registry_package(self, save_dir: Path) -> None:
        """Generate the artifact for the oob package-storage (Fleet registry layout)."""
        from .schemas.registry_package import RegistryPackageManifestV1, RegistryPackageManifestV3

        # 8.12.0+ we use elastic package v3
        stack_version = Version.parse(self.name, optional_minor_and_patch=True)
        if stack_version >= Version.parse("8.12.0"):
            manifest = RegistryPackageManifestV3.from_dict(self.registry_data)
        else:
            manifest = RegistryPackageManifestV1.from_dict(self.registry_data)

        package_dir = Path(save_dir) / "fleet" / manifest.version
        docs_dir = package_dir / "docs"
        rules_dir = package_dir / "kibana" / definitions.ASSET_TYPE

        docs_dir.mkdir(parents=True)
        rules_dir.mkdir(parents=True)

        manifest_file = package_dir / "manifest.yml"
        readme_file = docs_dir / "README.md"
        notice_file = package_dir / "NOTICE.txt"
        logo_file = package_dir / "img" / "security-logo-color-64px.svg"

        manifest_file.write_text(yaml.safe_dump(manifest.to_dict()))

        logo_file.parent.mkdir(parents=True)
        shutil.copyfile(FLEET_PKG_LOGO, logo_file)

        for rule in self.rules:
            asset = rule.get_asset()
            # if this package includes historical rules the IDs need to be changed
            # asset['id] and the file name needs to resemble RULEID_VERSION instead of RULEID
            asset_id = f"{asset['attributes']['rule_id']}_{asset['attributes']['version']}"
            asset["id"] = asset_id
            asset_path = rules_dir / f"{asset_id}.json"

            asset_path.write_text(json.dumps(asset, indent=4, sort_keys=True), encoding="utf-8")

        notice_contents = NOTICE_FILE.read_text()
        readme_text = textwrap.dedent("""
            # Prebuilt Security Detection Rules

            The detection rules package stores the prebuilt security rules for the Elastic Security [detection engine](https://www.elastic.co/guide/en/security/7.13/detection-engine-overview.html).

            To download or update the rules, click **Settings** > **Install Prebuilt Security Detection Rules assets**.
            Then [import](https://www.elastic.co/guide/en/security/current/rules-ui-management.html#load-prebuilt-rules)
            the rules into the Detection engine.

            ## License Notice

            """).lstrip()

        # notice only needs to be appended to the README for 7.13.x
        # in 7.14+ there's a separate modal to display this
        if self.name == "7.13":
            notice_contents = textwrap.indent(notice_contents, prefix="    ")

        readme_file.write_text(readme_text)
        notice_file.write_text(notice_contents)

    def create_bulk_index_body(self) -> tuple[Ndjson, Ndjson]:
        """Create a body to bulk index into a stack.

        Returns (bulk_upload_docs, importable_rules_docs): the first interleaves
        ES bulk `create` actions with docs, the second holds only the rule docs.
        """
        package_hash = self.get_package_hash(verbose=False)
        now = datetime.now(UTC).isoformat()
        create = {"create": {"_index": f"rules-repo-{self.name}-{package_hash}"}}

        # first doc is summary stats
        summary_doc: dict[str, Any] = {
            "group_hash": package_hash,
            "package_version": self.name,
            "rule_count": len(self.rules),
            "rule_ids": [],
            "rule_names": [],
            "rule_hashes": [],
            "source": "repo",
            "details": {"datetime_uploaded": now},
        }
        bulk_upload_docs = Ndjson([create, summary_doc])
        importable_rules_docs = Ndjson()

        for rule in self.rules:
            summary_doc["rule_ids"].append(rule.id)
            summary_doc["rule_names"].append(rule.name)
            summary_doc["rule_hashes"].append(rule.contents.get_hash())

            if rule.id in self.new_ids:
                status = "new"
            elif rule.id in self.changed_ids:
                status = "modified"
            else:
                status = "unmodified"

            # Same `create` action dict is re-used for every rule doc.
            bulk_upload_docs.append(create)

            relative_path = str(rule.get_base_rule_dir())
            if not relative_path:
                raise ValueError(f"Could not find a valid relative path for the rule: {rule.id}")

            rule_doc = {
                "hash": rule.contents.get_hash(),
                "source": "repo",
                "datetime_uploaded": now,
                "status": status,
                "package_version": self.name,
                "flat_mitre": ThreatMapping.flatten(rule.contents.data.threat).to_dict(),
                "relative_path": relative_path,
            }
            rule_doc.update(**rule.contents.to_api_format())
            bulk_upload_docs.append(rule_doc)
            importable_rules_docs.append(rule_doc)

        return bulk_upload_docs, importable_rules_docs

    @staticmethod
    def add_historical_rules(
        historical_rules: dict[str, dict[str, Any]],
        manifest_version: str,
    ) -> list[dict[str, Any]] | None:
        """Adds historical rules to existing build package."""
        rules_dir = CURRENT_RELEASE_PATH / "fleet" / manifest_version / "kibana" / "security_rule"

        # iterates over historical rules from previous package and writes them to disk
        for historical_rule_contents in historical_rules.values():
            rule_id = historical_rule_contents["attributes"]["rule_id"]
            historical_rule_version = historical_rule_contents["attributes"]["version"]

            # checks if the rule exists in the current package first
            current_rule_path = list(rules_dir.glob(f"{rule_id}*.json"))
            if not current_rule_path:
                continue

            # load the current rule from disk
            current_rule_path = current_rule_path[0]
            current_rule_json = json.load(current_rule_path.open(encoding="UTF-8"))
            current_rule_version = current_rule_json["attributes"]["version"]

            # if the historical rule version and current rules version differ, write
            # the historical rule to disk
            if historical_rule_version != current_rule_version:
                historical_rule_path = rules_dir / f"{rule_id}_{historical_rule_version}.json"
                with historical_rule_path.open("w", encoding="UTF-8") as file:
                    json.dump(historical_rule_contents, file)
2021-05-13 14:27:32 -06:00
@cached
def current_stack_version() -> str:
    """Return the current package name (stack version) from the packages config."""
    package_config = Package.load_configs()
    return package_config["name"]