From b5ef2f5f02fbeb6d73c74bd1cecb102c1264725c Mon Sep 17 00:00:00 2001 From: Terrance DeJesus <99630311+terrancedejesus@users.noreply.github.com> Date: Fri, 21 Apr 2023 11:03:29 -0400 Subject: [PATCH] [FR] Generate Historical Rule Files in Build Release Packages (#2715) * adding solution for historical rules in release package * addressing flake errors * format changes * REVERT CHANGES - testing release-fleet workflow * REVERTING CHANGES * added historical flag for packaging to account for older branches * addressing flake errors * updated build for CI * REMOVE: This is temporary to run a workflow from this branch * updates to address requirements for contents * reverting packages.yml * Update detection_rules/integrations.py Co-authored-by: Mika Ayenson * Update detection_rules/integrations.py Co-authored-by: Mika Ayenson * addressed feedback and added click echo comments * addressed flake errors and added some comments --------- Co-authored-by: Mika Ayenson --- .github/workflows/pythonpackage.yml | 2 +- .github/workflows/release-fleet.yml | 15 ++++++++-- detection_rules/devtools.py | 19 ++++++++++-- detection_rules/integrations.py | 34 +++++++++++++++++++++ detection_rules/packaging.py | 46 +++++++++++++++++++++++++---- 5 files changed, 104 insertions(+), 12 deletions(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index 1e20e2527..1cc1f7ec8 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -41,7 +41,7 @@ jobs: # only generate the navigator files on push events to main GENERATE_NAVIGATOR_FILES: "${{ github.event_name == 'push' && github.ref == 'refs/heads/main' && '--generate-navigator' || ' ' }}" run: | - python -m detection_rules dev build-release $GENERATE_NAVIGATOR_FILES + python -m detection_rules dev build-release $GENERATE_NAVIGATOR_FILES --add-historical 'no' - name: Archive production artifacts for branch builds uses: actions/upload-artifact@v2 diff --git 
a/.github/workflows/release-fleet.yml b/.github/workflows/release-fleet.yml index 68bee1828..15b53b34f 100644 --- a/.github/workflows/release-fleet.yml +++ b/.github/workflows/release-fleet.yml @@ -15,8 +15,8 @@ on: description: 'Create a PR as draft' required: false options: - - "y" - - "n" + - "yes" + - "no" package_maturity: type: choice description: 'Package Maturity' @@ -31,6 +31,13 @@ on: options: - "true" - "false" + add_historical: + type: choice + description: 'Add Historical Rules' + required: true + options: + - "yes" + - "no" commit_hash: description: 'Commit hash' required: true @@ -136,9 +143,11 @@ jobs: git push origin $RELEASE_TAG - name: Build release package + env: + HISTORICAL: "${{github.event.inputs.add_historical}}" run: | cd detection-rules - python -m detection_rules dev build-release + python -m detection_rules dev build-release --add-historical $HISTORICAL - name: Set github config run: | diff --git a/detection_rules/devtools.py b/detection_rules/devtools.py index c135ccbeb..3a05479a5 100644 --- a/detection_rules/devtools.py +++ b/detection_rules/devtools.py @@ -36,7 +36,8 @@ from .docs import IntegrationSecurityDocs from .endgame import EndgameSchemaManager from .eswrap import CollectEvents, add_range_to_dsl from .ghwrap import GithubClient, update_gist -from .integrations import (build_integrations_manifest, +from .integrations import (SecurityDetectionEngine, + build_integrations_manifest, build_integrations_schemas, find_latest_compatible_version, find_latest_integration_version, @@ -82,9 +83,13 @@ def dev_group(): @click.option('--update-version-lock', '-u', is_flag=True, help='Save version.lock.json file with updated rule versions in the package') @click.option('--generate-navigator', is_flag=True, help='Generate ATT&CK navigator files') -def build_release(config_file, update_version_lock: bool, generate_navigator: bool, release=None, verbose=True): +@click.option('--add-historical', type=str, required=True, help='Generate historical 
package-registry files') +def build_release(config_file, update_version_lock: bool, generate_navigator: bool, add_historical: str, + release=None, verbose=True): """Assemble all the rules into Kibana-ready release files.""" config = load_dump(config_file)['package'] + add_historical = True if add_historical == "yes" else False + if generate_navigator: config['generate_navigator'] = True @@ -94,13 +99,21 @@ def build_release(config_file, update_version_lock: bool, generate_navigator: bo if verbose: click.echo(f'[+] Building package {config.get("name")}') - package = Package.from_config(config, verbose=verbose) + package = Package.from_config(config, verbose=verbose, historical=add_historical) if update_version_lock: default_version_lock.manage_versions(package.rules, save_changes=True, verbose=verbose) package.save(verbose=verbose) + if add_historical: + previous_pkg_version = find_latest_integration_version("security_detection_engine", "ga", config['name']) + sde = SecurityDetectionEngine() + historical_rules = sde.load_integration_assets(previous_pkg_version) + historical_rules = sde.transform_legacy_assets(historical_rules) + click.echo(f'[+] Adding historical rules from {previous_pkg_version} package') + package.add_historical_rules(historical_rules, config['registry_data']['version']) + if verbose: package.get_package_hash(verbose=verbose) click.echo(f'- {len(package.rules)} rules included') diff --git a/detection_rules/integrations.py b/detection_rules/integrations.py index fec0b6353..55c958032 100644 --- a/detection_rules/integrations.py +++ b/detection_rules/integrations.py @@ -314,3 +314,37 @@ def get_integration_schema_data(data, meta, package_integrations: dict) -> Gener "stack_version": stack_version, "ecs_version": ecs_version, "package_version": package_version, "endgame_version": endgame_version} yield data + + +class SecurityDetectionEngine: + """Dedicated to Security Detection Engine integration.""" + + def __init__(self): + self.epr_url = 
"https://epr.elastic.co/package/security_detection_engine/" + + def load_integration_assets(self, package_version: Version) -> dict: + """Loads integration assets into memory.""" + + epr_package_url = f"{self.epr_url}{str(package_version)}/" + epr_response = requests.get(epr_package_url, timeout=10) + epr_response.raise_for_status() + package_obj = epr_response.json() + zip_url = f"https://epr.elastic.co{package_obj['download']}" + zip_response = requests.get(zip_url) + with unzip(zip_response.content) as zip_package: + asset_file_names = [asset for asset in zip_package.namelist() if "json" in asset] + assets = {x.split("/")[-1].replace(".json", ""): json.loads(zip_package.read(x).decode('utf-8')) + for x in asset_file_names} + return assets + + def transform_legacy_assets(self, assets: dict) -> dict: + """Transforms legacy rule assets to historical rules.""" + # this code can be removed after the 8.8 minor release + # epr prebuilt rule packages should have appropriate file names + + assets_transformed = {} + for asset_id, contents in assets.items(): + new_asset_id = f"{contents['attributes']['rule_id']}_{contents['attributes']['version']}" + contents["id"] = new_asset_id + assets_transformed[new_asset_id] = contents + return assets_transformed diff --git a/detection_rules/packaging.py b/detection_rules/packaging.py index 8fc16fdab..0aafd49ed 100644 --- a/detection_rules/packaging.py +++ b/detection_rules/packaging.py @@ -77,7 +77,7 @@ class Package(object): def __init__(self, rules: RuleCollection, name: str, release: Optional[bool] = False, min_version: Optional[int] = None, max_version: Optional[int] = None, registry_data: Optional[dict] = None, verbose: Optional[bool] = True, - generate_navigator: bool = False): + generate_navigator: bool = False, historical: bool = False): """Initialize a package.""" self.name = name self.rules = rules @@ -85,6 +85,7 @@ class Package(object): self.release = release self.registry_data = registry_data or {} 
self.generate_navigator + self.historical = historical if min_version is not None: self.rules = self.rules.filter(lambda r: min_version <= r.contents.latest_version) @@ -221,7 +222,7 @@ class Package(object): return sha256 @classmethod - def from_config(cls, config: dict = None, verbose: bool = False) -> 'Package': + def from_config(cls, config: dict = None, verbose: bool = False, historical: bool = False) -> 'Package': """Load a rules package given a config.""" all_rules = RuleCollection.default() config = config or {} @@ -238,7 +239,7 @@ class Package(object): if verbose: click.echo(f' - {len(all_rules) - len(rules)} rules excluded from package') - package = cls(rules, verbose=verbose, **config) + package = cls(rules, verbose=verbose, historical=historical, **config) return package @@ -399,8 +400,16 @@ class Package(object): # shutil.copyfile(CHANGELOG_FILE, str(rules_dir.joinpath('CHANGELOG.json'))) for rule in self.rules: - asset_path = rules_dir / f'{rule.id}.json' - asset_path.write_text(json.dumps(rule.get_asset(), indent=4, sort_keys=True), encoding="utf-8") + asset = rule.get_asset() + if self.historical: + # if this package includes historical rules the IDs need to be changed + # asset['id'] and the file name needs to resemble RULEID_VERSION instead of RULEID + asset_id = f"{asset['attributes']['rule_id']}_{asset['attributes']['version']}" + asset["id"] = asset_id + asset_path = rules_dir / f'{asset_id}.json' + else: + asset_path = rules_dir / f'{asset["id"]}.json' + asset_path.write_text(json.dumps(asset, indent=4, sort_keys=True), encoding="utf-8") notice_contents = Path(NOTICE_FILE).read_text() readme_text = textwrap.dedent(""" @@ -470,6 +479,33 @@ class Package(object): return bulk_upload_docs, importable_rules_docs + @staticmethod + def add_historical_rules(historical_rules: Dict[str, dict], manifest_version: str) -> list: + """Adds historical rules to existing build package.""" + rules_dir = CURRENT_RELEASE_PATH / 'fleet' / 
manifest_version / 'kibana' / 'security_rule' + + # iterates over historical rules from previous package and writes them to disk + for historical_rule_id, historical_rule_contents in historical_rules.items(): + rule_id = historical_rule_contents["attributes"]["rule_id"] + historical_rule_version = historical_rule_contents['attributes']['version'] + + # checks if the rule exists in the current package first + current_rule_path = list(rules_dir.glob(f"{rule_id}*.json")) + if not current_rule_path: + continue + + # load the current rule from disk + current_rule_path = current_rule_path[0] + current_rule_json = json.load(current_rule_path.open(encoding="UTF-8")) + current_rule_version = current_rule_json['attributes']['version'] + + # if the historical rule version and current rules version differ, write + # the historical rule to disk + if historical_rule_version != current_rule_version: + historical_rule_path = rules_dir / f"{historical_rule_id}.json" + with historical_rule_path.open("w", encoding="UTF-8") as file: + json.dump(historical_rule_contents, file) + @cached def current_stack_version() -> str: