[FR] Adapt PyPI semver Library and Remove Custom (#2503)

* removed custom semver and replaced with pypi

* updated beats.py version references

* updated bump-versions CLI command to use semver and change logic

* updated schemas __init__, test_version_lock and unstage incompatible rules CLI

* updated test_stack_schema_map in TestVersions unittest

* updated test_all_rules unit testing Version() references

* updated stack_compat.py for get_restricted_field references

* updated version_lock.py Version() references

* updated docs.py Version() reference for parse_registry

* updated devtools.py Version() reference for trim-version-lock

* updated mixins.py Version() reference in validate_field_compatibility

* adjusted schemas.__init__ Version() reference in get_stack_schemas

* adjusted ecs.py Version() references

* adjusted integrations.py Version() references

* adjusted rule.py Version() references

* sorted imports

* replaced custom semver with pypi semver in unit test files

* addressed unit test and flake errors

* changed semver strings cast in version_lock.py

* fixed sorting in integrations.py

* updated bump-pkg-versions CLI command

* adjusted semantic version in unstage-incompatible-rules command

* adjusted semver import to VersionInfo

* added semver 3 and adjusted import names

* added optional_minor_and_patch parameter where version is major.minor

* updated bump-pkg-versions to always save to packages.yml

* removed leftover split call & updated find latest compatible version command

* updated integrations.py, version_lock.py and schemas.__init__.py

* changed fstring reference in downgrade function

* reverted formatting changes for detection_rules __init__.py

* added newline to detection_rules __init__.py

* adjusted finding latest_release for attack package logic

* adjusted unstage-incompatible-rules command logic comparing versions

* removing changes from misc.py related to auto-formatting

* adding newline to misc.py

* fixed bug in downgrade function calling decorators

* added semantic version validation on migrate decorator function

* added expected type returned from find_latest_integration_version in integrations.py

* add comment about stripped versions for version lock file

Co-authored-by: Mika Ayenson <Mikaayenson@users.noreply.github.com>

---------

Co-authored-by: Mika Ayenson <Mikaayenson@users.noreply.github.com>
This commit is contained in:
Terrance DeJesus
2023-02-07 14:26:29 -05:00
committed by GitHub
parent 9ce8faebea
commit fb2b4529c5
16 changed files with 214 additions and 224 deletions
+6 -5
View File
@@ -13,7 +13,7 @@ import json
import requests
from collections import OrderedDict
from .semver import Version
from semver import Version
from .utils import cached, clear_caches, get_etc_path, get_etc_glob_path, read_gzip, gzip_compress
PLATFORMS = ['Windows', 'macOS', 'Linux']
@@ -105,16 +105,17 @@ def refresh_attack_data(save=True) -> (Optional[dict], Optional[bytes]):
_, version = name.lower().split(pattern, 1)
return version
current_version = get_version_from_tag(filename, 'attack-v')
current_version = Version.parse(get_version_from_tag(filename, 'attack-v'), optional_minor_and_patch=True)
r = requests.get('https://api.github.com/repos/mitre/cti/tags')
r.raise_for_status()
releases = [t for t in r.json() if t['name'].startswith('ATT&CK-v')]
latest_release = max(releases, key=lambda release: Version(get_version_from_tag(release['name'])))
latest_release = max(releases, key=lambda release: Version.parse(get_version_from_tag(release['name']),
optional_minor_and_patch=True))
release_name = latest_release['name']
latest_version = get_version_from_tag(release_name)
latest_version = Version.parse(get_version_from_tag(release_name), optional_minor_and_patch=True)
if Version(current_version) >= Version(latest_version):
if current_version >= latest_version:
print(f'No versions newer than the current detected: {current_version}')
return None, None
+9 -7
View File
@@ -4,18 +4,20 @@
# 2.0.
"""ECS Schemas management."""
import json
import os
import re
from typing import List, Optional
import kql
import eql
import json
import requests
from semver import Version
import yaml
from .semver import Version
from .utils import DateTimeEncoder, unzip, get_etc_path, gzip_compress, read_gzip, cached
import kql
from .utils import (DateTimeEncoder, cached, get_etc_path, gzip_compress,
read_gzip, unzip)
def _decompress_and_save_schema(url, release_name):
@@ -91,7 +93,7 @@ def download_latest_beats_schema():
url = 'https://api.github.com/repos/elastic/beats/releases'
releases = requests.get(url)
latest_release = max(releases.json(), key=lambda release: Version(release["tag_name"].lstrip("v")))
latest_release = max(releases.json(), key=lambda release: Version.parse(release["tag_name"].lstrip("v")))
download_beats_schema(latest_release["tag_name"])
@@ -198,7 +200,7 @@ def get_versions() -> List[Version]:
for filename in os.listdir(get_etc_path("beats_schemas")):
version_match = re.match(r'v(.+)\.json\.gz', filename)
if version_match:
versions.append(Version(version_match.groups()[0]))
versions.append(Version.parse(version_match.groups()[0]))
return versions
@@ -213,7 +215,7 @@ def read_beats_schema(version: str = None):
if version and version.lower() == 'main':
return json.loads(read_gzip(get_etc_path('beats_schemas', 'main.json.gz')))
version = Version(version) if version else None
version = Version.parse(version) if version else None
beats_schemas = get_versions()
if version and version not in beats_schemas:
+50 -42
View File
@@ -21,6 +21,7 @@ from typing import Dict, List, Optional, Tuple
import click
import requests.exceptions
from semver import Version
import yaml
from elasticsearch import Elasticsearch
from eql.table import Table
@@ -33,7 +34,10 @@ from .docs import IntegrationSecurityDocs
from .endgame import EndgameSchemaManager
from .eswrap import CollectEvents, add_range_to_dsl
from .ghwrap import GithubClient, update_gist
from .integrations import (build_integrations_manifest, build_integrations_schemas, find_latest_compatible_version,
from .integrations import (build_integrations_manifest,
build_integrations_schemas,
find_latest_compatible_version,
find_latest_integration_version,
load_integrations_manifests)
from .main import root
from .misc import PYTHON_LICENSE, add_client, client_error
@@ -43,9 +47,8 @@ from .rule import (AnyRuleData, BaseRuleData, DeprecatedRule, QueryRuleData,
ThreatMapping, TOMLRule)
from .rule_loader import RuleCollection, production_filter
from .schemas import definitions, get_stack_versions
from .semver import Version
from .utils import (dict_hash, get_etc_path, get_path, load_dump, save_etc_dump,
load_etc_dump)
from .utils import (dict_hash, get_etc_path, get_path, load_dump,
load_etc_dump, save_etc_dump)
from .version_lock import VersionLockFile, default_version_lock
RULES_DIR = get_path('rules')
@@ -152,43 +155,45 @@ def build_integration_docs(ctx: click.Context, registry_version: str, pre: str,
return docs
@dev_group.command("bump-versions")
@click.option("--major", is_flag=True, help="bump the major version")
@click.option("--minor", is_flag=True, help="bump the minor version")
@click.option("--patch", is_flag=True, help="bump the patch version")
@click.option("--package", is_flag=True, help="Update the package version in the packages.yml file")
@click.option("--kibana", is_flag=True, help="Update the kibana version in the packages.yml file")
@click.option("--registry", is_flag=True, help="Update the registry version in the packages.yml file")
def bump_versions(major, minor, patch, package, kibana, registry):
@dev_group.command("bump-pkg-versions")
@click.option("--major-release", is_flag=True, help="bump the major version")
@click.option("--minor-release", is_flag=True, help="bump the minor version")
@click.option("--patch-release", is_flag=True, help="bump the patch version")
@click.option("--maturity", type=click.Choice(['beta', 'ga'], case_sensitive=False),
required=True, help="beta or production versions")
def bump_versions(major_release: bool, minor_release: bool, patch_release: bool, maturity: str):
"""Bump the versions"""
package_data = load_etc_dump('packages.yml')['package']
ver = package_data["name"]
new_version = Version(ver).bump(major, minor, patch)
pkg_data = load_etc_dump('packages.yml')['package']
kibana_ver = Version.parse(pkg_data["name"], optional_minor_and_patch=True)
pkg_ver = Version.parse(pkg_data["registry_data"]["version"])
pkg_kibana_ver = Version.parse(pkg_data["registry_data"]["conditions"]["kibana.version"].lstrip("^"))
if major_release:
pkg_data["name"] = str(kibana_ver.bump_major()).rstrip(".0")
pkg_data["registry_data"]["conditions"]["kibana.version"] = f"^{pkg_kibana_ver.bump_major()}"
pkg_data["registry_data"]["version"] = str(pkg_ver.bump_major().bump_prerelease("beta"))
if minor_release:
pkg_data["name"] = str(kibana_ver.bump_minor()).rstrip(".0")
pkg_data["registry_data"]["conditions"]["kibana.version"] = f"^{pkg_kibana_ver.bump_minor()}"
pkg_data["registry_data"]["version"] = str(pkg_ver.bump_minor().bump_prerelease("beta"))
pkg_data["registry_data"]["release"] = maturity
if patch_release:
latest_patch_release_ver = find_latest_integration_version("security_detection_engine",
maturity, pkg_data["name"])
if maturity == "ga":
pkg_data["registry_data"]["version"] = str(latest_patch_release_ver.bump_patch())
pkg_data["registry_data"]["release"] = maturity
else:
pkg_data["registry_data"]["version"] = str(latest_patch_release_ver.bump_prerelease("beta"))
pkg_data["registry_data"]["release"] = maturity
kibana_version = f"^{new_version}.0" if not patch else f"^{new_version}"
registry_version = f"{new_version}.0-dev.0" if not patch else f"{new_version}-dev.0"
click.echo(f"Kibana version: {pkg_data['name']}")
click.echo(f"Package Kibana version: {pkg_data['registry_data']['conditions']['kibana.version']}")
click.echo(f"Package version: {pkg_data['registry_data']['version']}")
# print the new versions
click.echo(f"New package version: {new_version}")
click.echo(f"New registry data version: {registry_version}")
click.echo(f"New Kibana version: {kibana_version}")
if package:
# update package version
package_data["name"] = str(new_version)
if kibana:
# update kibana version
package_data["registry_data"]["conditions"]["kibana.version"] = kibana_version
if registry:
# update registry version
package_data["registry_data"]["version"] = registry_version
# update packages.yml
if package or kibana or registry:
save_etc_dump({"package": package_data}, "packages.yml")
# we only save major and minor version bumps
# patch version bumps are OOB packages and thus we keep the base versioning
save_etc_dump({"package": pkg_data}, "packages.yml")
@dataclasses.dataclass
@@ -249,7 +254,7 @@ def prune_staging_area(target_stack_version: str, dry_run: bool, exception_list:
}
exceptions.update(exception_list.split(","))
target_stack_version = Version(target_stack_version)[:2]
target_stack_version = Version.parse(target_stack_version, optional_minor_and_patch=True)
# load a structured summary of the diff from git
git_output = subprocess.check_output(["git", "diff", "--name-status", "HEAD"])
@@ -270,7 +275,8 @@ def prune_staging_area(target_stack_version: str, dry_run: bool, exception_list:
dict_contents = RuleCollection.deserialize_toml_string(change.read())
min_stack_version: Optional[str] = dict_contents.get("metadata", {}).get("min_stack_version")
if min_stack_version is not None and target_stack_version < Version(min_stack_version)[:2]:
if min_stack_version is not None and \
(target_stack_version < Version.parse(min_stack_version, optional_minor_and_patch=True)):
# rule is incompatible, add to the list of reversions to make later
reversions.append(change)
@@ -896,13 +902,13 @@ def trim_version_lock(min_version: str, dry_run: bool):
stack_versions = get_stack_versions()
assert min_version in stack_versions, f'Unknown min_version ({min_version}), expected: {", ".join(stack_versions)}'
min_version = Version(min_version)
min_version = Version.parse(min_version)
version_lock_dict = default_version_lock.version_lock.to_dict()
removed = {}
for rule_id, lock in version_lock_dict.items():
if 'previous' in lock:
prev_vers = [Version(v) for v in list(lock['previous'])]
prev_vers = [Version.parse(v, optional_minor_and_patch=True) for v in list(lock['previous'])]
outdated_vers = [v for v in prev_vers if v <= min_version]
if not outdated_vers:
@@ -1212,7 +1218,9 @@ def show_latest_compatible_version(package: str, stack_version: str) -> None:
return
try:
version = find_latest_compatible_version(package, "", stack_version, packages_manifest)
version = find_latest_compatible_version(package, "",
Version.parse(stack_version, optional_minor_and_patch=True),
packages_manifest)
click.echo(f"Compatible integration {version=}")
except Exception as e:
click.echo(f"Error finding compatible version: {str(e)}")
+4 -4
View File
@@ -5,6 +5,7 @@
"""Create summary documents for a rule package."""
import itertools
import json
import re
import shutil
import textwrap
@@ -13,14 +14,13 @@ from datetime import datetime
from pathlib import Path
from typing import Dict, Iterable, Optional, Union
import json
from semver import Version
import xlsxwriter
from .attack import attack_tm, matrix, tactics, technique_lookup
from .packaging import Package
from .rule_loader import DeprecatedCollection, RuleCollection
from .rule import ThreatMapping, TOMLRule
from .semver import Version
from .rule_loader import DeprecatedCollection, RuleCollection
class PackageDocument(xlsxwriter.Workbook):
@@ -304,7 +304,7 @@ class IntegrationSecurityDocs:
@staticmethod
def parse_registry(registry_version: str) -> (str, str, str):
registry_version = Version(registry_version)
registry_version = Version.parse(registry_version)
short_registry_version = [str(n) for n in registry_version[:3]]
registry_version_str = '.'.join(short_registry_version)
base_name = "-".join(short_registry_version)
+9 -8
View File
@@ -6,18 +6,19 @@
"""ECS Schemas management."""
import copy
import glob
import json
import os
import shutil
import json
from pathlib import Path
import requests
import eql
import eql.types
import requests
from semver import Version
import yaml
from .semver import Version
from .utils import DateTimeEncoder, cached, load_etc_dump, get_etc_path, gzip_compress, read_gzip, unzip
from .utils import (DateTimeEncoder, cached, get_etc_path, gzip_compress,
load_etc_dump, read_gzip, unzip)
ETC_NAME = "ecs_schemas"
ECS_SCHEMAS_DIR = get_etc_path(ETC_NAME)
@@ -87,7 +88,7 @@ def get_max_version(include_master=False):
if include_master and any([v.startswith('master') for v in versions]):
return list(Path(ECS_SCHEMAS_DIR).glob('master*'))[0].name
return str(max([Version(v) for v in versions if not v.startswith('master')]))
return str(max([Version.parse(v) for v in versions if not v.startswith('master')]))
@cached
@@ -205,12 +206,12 @@ def get_kql_schema(version=None, indexes=None, beat_schema=None) -> dict:
def download_schemas(refresh_master=True, refresh_all=False, verbose=True):
"""Download additional schemas from ecs releases."""
existing = [Version(v) for v in get_schema_map()] if not refresh_all else []
existing = [Version.parse(v) for v in get_schema_map()] if not refresh_all else []
url = 'https://api.github.com/repos/elastic/ecs/releases'
releases = requests.get(url)
for release in releases.json():
version = Version(release.get('tag_name', '').lstrip('v'))
version = Version.parse(release.get('tag_name', '').lstrip('v'))
# we don't ever want beta
if not version or version < (1, 0, 1) or version in existing:
@@ -247,7 +248,7 @@ def download_schemas(refresh_master=True, refresh_all=False, verbose=True):
# handle working master separately
if refresh_master:
master_ver = requests.get('https://raw.githubusercontent.com/elastic/ecs/master/version')
master_ver = Version(master_ver.text.strip())
master_ver = Version.parse(master_ver.text.strip())
master_schema = requests.get('https://raw.githubusercontent.com/elastic/ecs/master/generated/ecs/ecs_flat.yml')
master_schema = yaml.safe_load(master_schema.text)
+42 -24
View File
@@ -13,6 +13,7 @@ from pathlib import Path
from typing import Generator, Tuple, Union
import requests
from semver import Version
import yaml
from marshmallow import EXCLUDE, Schema, fields, post_load
@@ -21,7 +22,6 @@ import kql
from . import ecs
from .beats import flatten_ecs_schema
from .misc import load_current_package_version
from .semver import Version
from .utils import cached, get_etc_path, read_gzip, unzip
MANIFEST_FILE_PATH = Path(get_etc_path('integration-manifests.json.gz'))
@@ -138,31 +138,34 @@ def find_least_compatible_version(package: str, integration: str,
current_stack_version: str, packages_manifest: dict) -> str:
"""Finds least compatible version for specified integration based on stack version supplied."""
integration_manifests = {k: v for k, v in sorted(packages_manifest[package].items(),
key=lambda x: Version(str(x[0])))}
key=lambda x: Version.parse(x[0]))}
current_stack_version = Version.parse(current_stack_version, optional_minor_and_patch=True)
# filter integration_manifests to only the latest major entries
major_versions = sorted(list(set([Version(manifest_version)[0] for manifest_version in integration_manifests])),
reverse=True)
major_versions = sorted(list(set([Version.parse(manifest_version).major
for manifest_version in integration_manifests])), reverse=True)
for max_major in major_versions:
major_integration_manifests = \
{k: v for k, v in integration_manifests.items() if Version(k)[0] == max_major}
{k: v for k, v in integration_manifests.items() if Version.parse(k).major == max_major}
# iterates through ascending integration manifests
# returns latest major version that is least compatible
for version, manifest in OrderedDict(sorted(major_integration_manifests.items(),
key=lambda x: Version(str(x[0])))).items():
key=lambda x: Version.parse(x[0]))).items():
compatible_versions = re.sub(r"\>|\<|\=|\^", "", manifest["conditions"]["kibana"]["version"]).split(" || ")
for kibana_ver in compatible_versions:
kibana_ver = Version.parse(kibana_ver)
# check versions have the same major
if int(kibana_ver[0]) == int(current_stack_version[0]):
if Version(kibana_ver) <= Version(current_stack_version + ".0"):
if kibana_ver.major == current_stack_version.major:
if kibana_ver <= current_stack_version:
return f"^{version}"
raise ValueError(f"no compatible version for integration {package}:{integration}")
def find_latest_compatible_version(package: str, integration: str,
rule_stack_version: str, packages_manifest: dict) -> Union[None, Tuple[str, str]]:
rule_stack_version: Version,
packages_manifest: dict) -> Union[None, Tuple[str, str]]:
"""Finds least compatible version for specified integration based on stack version supplied."""
if not package:
@@ -173,7 +176,7 @@ def find_latest_compatible_version(package: str, integration: str,
raise ValueError(f"Package {package} not found in manifest.")
# Converts the dict keys (version numbers) to Version objects for proper sorting (descending)
integration_manifests = sorted(package_manifest.items(), key=lambda x: Version(str(x[0])), reverse=True)
integration_manifests = sorted(package_manifest.items(), key=lambda x: Version.parse(x[0]), reverse=True)
notice = ""
for version, manifest in integration_manifests:
@@ -187,9 +190,10 @@ def find_latest_compatible_version(package: str, integration: str,
if not compatible_versions:
raise ValueError(f"Manifest for {package}:{integration} version {version} is missing compatible versions")
highest_compatible_version = max(compatible_versions, key=lambda x: Version(x))
highest_compatible_version = Version.parse(max(compatible_versions,
key=lambda x: Version.parse(x)))
if Version(highest_compatible_version) > Version(rule_stack_version):
if highest_compatible_version > rule_stack_version:
# generate notice message that a later integration version is available
integration = f" {integration.strip()}" if integration else ""
@@ -197,44 +201,61 @@ def find_latest_compatible_version(package: str, integration: str,
f"Update the rule min_stack version from {rule_stack_version} to "
f"{highest_compatible_version} if using new features in this latest version.")
if int(highest_compatible_version[0]) == int(rule_stack_version[0]):
if highest_compatible_version.major == rule_stack_version.major:
return version, notice
else:
# Check for rules that cross majors
for compatible_version in compatible_versions:
if Version(compatible_version) <= Version(rule_stack_version):
if Version.parse(compatible_version) <= rule_stack_version:
return version, notice
raise ValueError(f"no compatible version for integration {package}:{integration}")
def get_integration_manifests(integration: str) -> list:
def get_integration_manifests(integration: str, prerelease: str, kibana_version: str) -> list:
"""Iterates over specified integrations from package-storage and combines manifests per version."""
epr_search_url = "https://epr.elastic.co/search"
if not prerelease:
prerelease = "false"
# link for search parameters - https://github.com/elastic/package-registry
epr_search_parameters = {"package": f"{integration}", "prerelease": "false",
epr_search_parameters = {"package": f"{integration}", "prerelease": prerelease,
"all": "true", "include_policy_templates": "true"}
epr_search_response = requests.get(epr_search_url, params=epr_search_parameters)
if kibana_version:
epr_search_parameters["kibana.version"] = kibana_version
epr_search_response = requests.get(epr_search_url, params=epr_search_parameters, timeout=10)
epr_search_response.raise_for_status()
manifests = epr_search_response.json()
if not manifests:
raise ValueError(f"EPR search for {integration} integration package returned empty list")
sorted_manifests = sorted(manifests, key=lambda p: Version.parse(p["version"]), reverse=True)
print(f"loaded {integration} manifests from the following package versions: "
f"{[manifest['version'] for manifest in manifests]}")
f"{[manifest['version'] for manifest in sorted_manifests]}")
return manifests
def find_latest_integration_version(integration: str, maturity: str, stack_version: Version) -> Version:
"""Finds the latest integration version based on maturity and stack version"""
prerelease = "false" if maturity == "ga" else "true"
existing_pkgs = get_integration_manifests(integration, prerelease, str(stack_version))
if maturity == "ga":
existing_pkgs = [pkg for pkg in existing_pkgs if not
Version.parse(pkg["version"]).prerelease]
if maturity == "beta":
existing_pkgs = [pkg for pkg in existing_pkgs if
Version.parse(pkg["version"]).prerelease]
return max([Version.parse(pkg["version"]) for pkg in existing_pkgs])
def get_integration_schema_data(data, meta, package_integrations: dict) -> Generator[dict, None, None]:
"""Iterates over specified integrations from package-storage and combines schemas per version."""
# lazy import to avoid circular import
from .rule import ( # pylint: disable=import-outside-toplevel
QueryRuleData, RuleMeta
)
QueryRuleData, RuleMeta)
data: QueryRuleData = data
meta: RuleMeta = meta
@@ -260,10 +281,7 @@ def get_integration_schema_data(data, meta, package_integrations: dict) -> Gener
# Use the minimum stack version from the package not the rule
min_stack = meta.min_stack_version or load_current_package_version()
# Prior to 8.3, some rules had a min_stack_version with only major.minor
if Version(min_stack) != 3:
min_stack = Version(Version(load_current_package_version()) + (0,))
min_stack = Version.parse(min_stack, optional_minor_and_patch=True)
package_version, notice = find_latest_compatible_version(package=package,
integration=integration,
+2 -2
View File
@@ -18,7 +18,7 @@ from marshmallow import Schema, ValidationError, fields, validates_schema
from .misc import load_current_package_version
from .schemas import definitions
from .schemas.stack_compat import get_incompatible_fields
from .semver import Version
from semver import Version
from .utils import cached, dict_hash
T = TypeVar('T')
@@ -180,7 +180,7 @@ class StackCompatMixin:
@validates_schema
def validate_field_compatibility(self, data: dict, **kwargs):
"""Verify stack-specific fields are properly applied to schema."""
package_version = Version(load_current_package_version())
package_version = Version.parse(load_current_package_version(), optional_minor_and_patch=True)
schema_fields = getattr(self, 'fields', {})
incompatible = get_incompatible_fields(list(schema_fields.values()), package_version)
if not incompatible:
+22 -22
View File
@@ -16,12 +16,14 @@ from typing import Any, Dict, List, Literal, Optional, Tuple, Union
from uuid import uuid4
import eql
import kql
from kql.ast import FieldComparison
from semver import Version
from marko.block import Document as MarkoDocument
from marko.ext.gfm import gfm
from marshmallow import ValidationError, validates_schema
import kql
from kql.ast import FieldComparison
from . import beats, ecs, endgame, utils
from .integrations import (find_least_compatible_version,
load_integrations_manifests)
@@ -31,16 +33,15 @@ from .rule_formatter import nested_normalize, toml_write
from .schemas import (SCHEMA_DIR, definitions, downgrade,
get_min_supported_stack_version, get_stack_schemas)
from .schemas.stack_compat import get_restricted_fields
from .semver import Version
from .utils import cached
_META_SCHEMA_REQ_DEFAULTS = {}
MIN_FLEET_PACKAGE_VERSION = '7.13.0'
BUILD_FIELD_VERSIONS = {
"related_integrations": (Version('8.3'), None),
"required_fields": (Version('8.3'), None),
"setup": (Version("8.3"), None)
"related_integrations": (Version.parse('8.3.0'), None),
"required_fields": (Version.parse('8.3.0'), None),
"setup": (Version.parse('8.3.0'), None)
}
@@ -352,7 +353,7 @@ class QueryValidator:
@cached
def get_required_fields(self, index: str) -> List[dict]:
"""Retrieves fields needed for the query along with type information from the schema."""
current_version = Version(Version(load_current_package_version()) + (0,))
current_version = Version.parse(load_current_package_version(), optional_minor_and_patch=True)
ecs_version = get_stack_schemas()[str(current_version)]['ecs']
beats_version = get_stack_schemas()[str(current_version)]['beats']
endgame_version = get_stack_schemas()[str(current_version)]['endgame']
@@ -487,8 +488,8 @@ class NewTermsRuleData(QueryRuleData):
kql_validator = KQLValidator(self.query)
kql_validator.validate(self, meta)
feature_min_stack = Version('8.4.0')
feature_min_stack_extended_fields = Version('8.6.0')
feature_min_stack = Version.parse('8.4.0')
feature_min_stack_extended_fields = Version.parse('8.6.0')
# validate history window start field exists and is correct
assert self.new_terms.history_window_start, \
@@ -503,9 +504,9 @@ class NewTermsRuleData(QueryRuleData):
# ecs validation
min_stack_version = meta.get("min_stack_version")
if min_stack_version is None:
min_stack_version = Version(Version(load_current_package_version()) + (0,))
min_stack_version = Version.parse(load_current_package_version(), optional_minor_and_patch=True)
else:
min_stack_version = Version(min_stack_version)
min_stack_version = Version.parse(min_stack_version)
assert min_stack_version >= feature_min_stack, \
f"New Terms rule types only compatible with {feature_min_stack}+"
@@ -677,8 +678,8 @@ class BaseRuleContents(ABC):
@property
def is_dirty(self) -> Optional[bool]:
"""Determine if the rule has changed since its version was locked."""
min_stack = self.get_supported_version()
existing_sha256 = self.version_lock.get_locked_hash(self.id, min_stack)
min_stack = Version.parse(self.get_supported_version())
existing_sha256 = self.version_lock.get_locked_hash(self.id, str(min_stack).rstrip(".0"))
if existing_sha256 is not None:
return existing_sha256 != self.sha256()
@@ -702,8 +703,8 @@ class BaseRuleContents(ABC):
"""Determine if the rule is in a forked version."""
if not self.has_forked:
return False
locked_min_stack = Version(self.lock_entry['min_stack_version'])
current_package_ver = Version(load_current_package_version())
locked_min_stack = Version.parse(self.lock_entry['min_stack_version'], optional_minor_and_patch=True)
current_package_ver = Version.parse(load_current_package_version(), optional_minor_and_patch=True)
return current_package_ver < locked_min_stack
def get_version_space(self) -> Optional[int]:
@@ -733,11 +734,10 @@ class BaseRuleContents(ABC):
@classmethod
def convert_supported_version(cls, stack_version: Optional[str]) -> Version:
"""Convert an optional stack version to the minimum for the lock in the form major.minor."""
min_version = get_min_supported_stack_version(drop_patch=True)
min_version = get_min_supported_stack_version()
if stack_version is None:
return min_version
short_stack_version = Version(Version(stack_version)[:2])
return max(short_stack_version, min_version)
return max(Version.parse(stack_version, optional_minor_and_patch=True), min_version)
def get_supported_version(self) -> str:
"""Get the lowest stack version for the rule that is currently supported in the form major.minor."""
@@ -947,9 +947,9 @@ class TOMLRuleContents(BaseRuleContents, MarshmallowDataclassMixin):
@staticmethod
def compare_field_versions(min_stack: Version, max_stack: Version) -> bool:
"""Check current rule version is within min and max stack versions."""
current_version = Version(load_current_package_version())
current_version = Version.parse(load_current_package_version(), optional_minor_and_patch=True)
max_stack = max_stack or current_version
return Version(min_stack) <= current_version >= Version(max_stack)
return min_stack <= current_version >= max_stack
@classmethod
def get_packaged_integrations(cls, data: QueryRuleData, meta: RuleMeta,
@@ -1020,9 +1020,9 @@ class TOMLRuleContents(BaseRuleContents, MarshmallowDataclassMixin):
def check_restricted_fields_compatibility(self) -> Dict[str, dict]:
"""Check for compatibility between restricted fields and the min_stack_version of the rule."""
default_min_stack = get_min_supported_stack_version(drop_patch=True)
default_min_stack = get_min_supported_stack_version()
if self.metadata.min_stack_version is not None:
min_stack = Version(self.metadata.min_stack_version)
min_stack = Version.parse(self.metadata.min_stack_version)
else:
min_stack = default_min_stack
restricted = self.data.get_restricted_fields
+23 -23
View File
@@ -5,16 +5,16 @@
import json
from collections import OrderedDict
from pathlib import Path
from typing import List, Optional, OrderedDict as OrderedDictType
from typing import List, Optional
from typing import OrderedDict as OrderedDictType
import jsonschema
from semver import Version
from ..misc import load_current_package_version
from ..utils import cached, get_etc_path, load_etc_dump
from . import definitions
from .rta_schema import validate_rta_mapping
from ..misc import load_current_package_version
from ..semver import Version
from ..utils import cached, get_etc_path, load_etc_dump
__all__ = (
"SCHEMA_DIR",
@@ -38,7 +38,9 @@ def all_versions() -> List[str]:
def migrate(version: str):
"""Decorator to set a migration."""
version = Version(version)
# checks that the migrate decorator name is semi-semantic versioned
# raises validation error from semver if not
Version.parse(version, optional_minor_and_patch=True)
def wrapper(f):
assert version not in migrations
@@ -231,19 +233,19 @@ def downgrade(api_contents: dict, target_version: str, current_version: Optional
if current_version is None:
current_version = current_stack_version()
current_major, current_minor = Version(current_version)[:2]
target_major, target_minor = Version(target_version)[:2]
current = Version.parse(current_version, optional_minor_and_patch=True)
target = Version.parse(target_version, optional_minor_and_patch=True)
# get all the versions between current_semver and target_semver
if target_major != current_major:
raise ValueError(f"Cannot backport to major version {target_major}")
if target.major != current.major:
raise ValueError(f"Cannot backport to major version {target.major}")
for minor in reversed(range(target_minor, current_minor)):
version = Version([target_major, minor])
for minor in reversed(range(target.minor, current.minor)):
version = f"{target.major}.{minor}"
if version not in migrations:
raise ValueError(f"Missing migration for {target_version}")
api_contents = migrations[version](version, api_contents)
api_contents = migrations[str(version)](version, api_contents)
return api_contents
@@ -256,15 +258,13 @@ def load_stack_schema_map() -> dict:
@cached
def get_stack_schemas(stack_version: Optional[str] = '0.0.0') -> OrderedDictType[str, dict]:
"""Return all ECS + beats to stack versions for every stack version >= specified stack version and <= package."""
stack_version = Version(stack_version or '0.0.0')
current_package = Version(load_current_package_version())
if len(current_package) == 2:
current_package = Version(current_package + (0,))
stack_version = Version.parse(stack_version or '0.0.0', optional_minor_and_patch=True)
current_package = Version.parse(load_current_package_version(), optional_minor_and_patch=True)
stack_map = load_stack_schema_map()
versions = {k: v for k, v in stack_map.items()
if (mapped_version := Version(k)) >= stack_version and mapped_version <= current_package and v}
versions = {k: v for k, v in stack_map.items() if
(((mapped_version := Version.parse(k)) >= stack_version)
and (mapped_version <= current_package) and v)} # noqa: W503
if stack_version > current_package:
versions[stack_version] = {'beats': 'main', 'ecs': 'master'}
@@ -287,8 +287,8 @@ def get_stack_versions(drop_patch=False) -> List[str]:
@cached
def get_min_supported_stack_version(drop_patch=False) -> Version:
def get_min_supported_stack_version() -> Version:
"""Get the minimum defined and supported stack version."""
stack_map = load_stack_schema_map()
min_version = min(Version(v) for v in list(stack_map))
return Version(min_version[:2]) if drop_patch else min_version
min_version = min([Version.parse(v) for v in list(stack_map)])
return min_version
+8 -5
View File
@@ -6,8 +6,9 @@
from dataclasses import Field
from typing import Dict, List, Optional, Tuple
from semver import Version
from ..misc import cached
from ..semver import Version
@cached
@@ -17,13 +18,14 @@ def get_restricted_field(schema_field: Field) -> Tuple[Optional[Version], Option
# marshmallow_dataclass passes the embedded metadata directly
min_compat = schema_field.metadata.get('metadata', schema_field.metadata).get('min_compat')
max_compat = schema_field.metadata.get('metadata', schema_field.metadata).get('max_compat')
min_compat = Version(min_compat) if min_compat else None
max_compat = Version(max_compat) if max_compat else None
min_compat = Version.parse(min_compat, optional_minor_and_patch=True) if min_compat else None
max_compat = Version.parse(max_compat, optional_minor_and_patch=True) if max_compat else None
return min_compat, max_compat
@cached
def get_restricted_fields(schema_fields: List[Field]) -> Dict[str, Tuple[Optional[Version], Optional[Version]]]:
def get_restricted_fields(schema_fields: List[Field]) -> Dict[str, Tuple[Optional[Version],
Optional[Version]]]:
"""Get a list of optional min and max compatible versions of fields (from a schema or dataclass)."""
restricted = {}
for _field in schema_fields:
@@ -35,7 +37,8 @@ def get_restricted_fields(schema_fields: List[Field]) -> Dict[str, Tuple[Optiona
@cached
def get_incompatible_fields(schema_fields: List[Field], package_version: Version) -> Optional[Dict[str, tuple]]:
def get_incompatible_fields(schema_fields: List[Field], package_version: Version) -> \
Optional[Dict[str, tuple]]:
"""Get a list of fields that are incompatible with the package version."""
if not schema_fields:
return
-49
View File
@@ -1,49 +0,0 @@
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the Elastic License
# 2.0; you may not use this file except in compliance with the Elastic License
# 2.0.
"""Helper functionality for comparing semantic versions."""
import re
from typing import Iterable, Union
class Version(tuple):
    """Lightweight semantic-version wrapper built on ``tuple``.

    Parses version strings such as ``"7.11.2"`` or ``"8.2.0-dev.1"`` into a
    tuple of components — ints for numeric parts, strings for pre-release
    labels — so that versions compare correctly via plain tuple ordering.
    """

    def __new__(cls, version: Union[str, Iterable]) -> 'Version':
        """Build a Version from a string, an int, or an iterable of components.

        :param version: a version string ("7.11.2"), an int, or an
            iterable of already-split components.
        """
        if isinstance(version, int):
            # BUG FIX: a bare int (e.g. Version(7)) previously raised
            # TypeError because tuple.__new__ requires an iterable;
            # treat it as a single-component version instead.
            version = (version,)
        if isinstance(version, (list, tuple)):
            return tuple.__new__(cls, version)
        # split on '.' (release separators) and '-' (pre-release separator);
        # numeric parts become ints so comparison is numeric, not lexical
        version_tuple = tuple(int(a) if a.isdigit() else a for a in re.split(r'[.-]', version))
        return tuple.__new__(cls, version_tuple)

    def __str__(self):
        """Convert back to a string (inverse of parsing)."""
        recovered_str = str(self[0])
        for additional in self[1:]:
            if isinstance(additional, str):
                # string components are pre-release labels, joined with '-'
                recovered_str += "-" + additional
            else:
                recovered_str += "." + str(additional)
        return recovered_str

    def bump(self, major: bool = False, minor: bool = False, patch: bool = False) -> 'Version':
        """Return a new Version with the requested component(s) incremented.

        NOTE: bumping does not reset lower components (e.g. a major bump of
        7.11.2 yields 8.11.2) — preserved from the original behavior.
        """
        versions = list(self)
        if major:
            versions[0] += 1
        if minor:
            versions[1] += 1
        if patch and len(versions) > 2:
            # increments the LAST component, whatever it is (for a version
            # with a pre-release suffix this is the suffix counter)
            versions[-1] += 1
        elif patch and len(versions) == 2:
            # no patch component yet: a patch bump of X.Y yields X.Y.1
            versions.append(1)
        return Version(versions)
def max_versions(*versions: str) -> str:
    """Return the highest of the given version strings, as a string."""
    return str(max(map(Version, versions)))
+13 -10
View File
@@ -9,11 +9,11 @@ from pathlib import Path
from typing import ClassVar, Dict, List, Optional, Union
import click
from semver import Version
from .mixins import LockDataclassMixin, MarshmallowDataclassMixin
from .rule_loader import RuleCollection
from .schemas import definitions
from .semver import Version
from .utils import cached, get_etc_path
ETC_VERSION_LOCK_FILE = "version.lock.json"
@@ -202,7 +202,7 @@ class VersionLock:
for rule in rules:
if rule.contents.metadata.maturity == "production" or rule.id in newly_deprecated:
# assume that older stacks are always locked first
min_stack = Version(rule.contents.get_supported_version())
min_stack = Version.parse(rule.contents.get_supported_version())
lock_from_rule = rule.contents.lock_info(bump=not exclude_version_update)
lock_from_file: dict = lock_file_contents.setdefault(rule.id, {})
@@ -224,6 +224,9 @@ class VersionLock:
latest_locked_stack_version = rule.contents.convert_supported_version(
lock_from_file.get("min_stack_version"))
# strip version down to only major.minor to compare against lock file versioning
stripped_version = f"{min_stack.major}.{min_stack.minor}"
if not lock_from_file or min_stack == latest_locked_stack_version:
route = 'A'
# 1) no breaking changes ever made or the first time a rule is created
@@ -233,7 +236,7 @@ class VersionLock:
# add the min_stack_version to the lock if it's explicitly set
if rule.contents.metadata.min_stack_version is not None:
lock_from_file["min_stack_version"] = str(min_stack)
lock_from_file["min_stack_version"] = stripped_version
log_msg = f'min_stack_version added: {min_stack}'
log_changes(rule, route, new_version, log_msg)
@@ -259,26 +262,26 @@ class VersionLock:
lock_from_file["previous"][str(latest_locked_stack_version)] = previous_lock_info
# overwrite the "latest" part of the lock at the top level
lock_from_file.update(lock_from_rule, min_stack_version=str(min_stack))
lock_from_file.update(lock_from_rule, min_stack_version=stripped_version)
new_version = lock_from_rule['version']
log_changes(
rule, route, new_version,
f'previous {latest_locked_stack_version} saved as version: {previous_lock_info["version"]}',
f'current min_stack updated to {min_stack}'
f'current min_stack updated to {stripped_version}'
)
elif min_stack < latest_locked_stack_version:
route = 'C'
# 4) on an old stack, after a breaking change has been made (updated fork)
assert str(min_stack) in lock_from_file.get("previous", {}), \
f"Expected {rule.id} @ v{min_stack} in the rule lock"
assert stripped_version in lock_from_file.get("previous", {}), \
f"Expected {rule.id} @ v{stripped_version} in the rule lock"
# TODO: Figure out whether we support locking old versions and if we want to
# "leave room" by skipping versions when breaking changes are made.
# We can still inspect the version lock manually after locks are made,
# since it's a good summary of everything that happens
previous_entry = lock_from_file["previous"][str(min_stack)]
previous_entry = lock_from_file["previous"][stripped_version]
max_allowable_version = previous_entry['max_allowable_version']
# if version bump collides with future bump: fail
@@ -291,10 +294,10 @@ class VersionLock:
f'exceed the max allowable version of {max_allowable_version}')
if info_from_rule != info_from_file:
lock_from_file["previous"][str(min_stack)].update(lock_from_rule)
lock_from_file["previous"][stripped_version].update(lock_from_rule)
new_version = lock_from_rule["version"]
log_changes(rule, route, 'unchanged',
f'previous version {min_stack} updated version to {new_version}')
f'previous version {stripped_version} updated version to {new_version}')
continue
else:
raise RuntimeError("Unreachable code")
+2 -1
View File
@@ -37,7 +37,8 @@ dependencies = [
"requests~=2.27",
"toml==0.10.0",
"typing-inspect==0.7.1",
"XlsxWriter~=1.3.6"
"XlsxWriter~=1.3.6",
"semver==3.0.0-dev.4"
]
[project.optional-dependencies]
dev = ["pep8-naming==0.7.0", "PyGithub==1.55", "flake8==3.8.1", "pyflakes==2.2.0", "pytest>=3.6", "pre-commit==2.20.0"]
+10 -9
View File
@@ -6,11 +6,13 @@
"""Test that all rules have valid metadata and syntax."""
import os
import re
import warnings
import unittest
import warnings
from collections import defaultdict
from pathlib import Path
from semver import Version
import kql
from detection_rules import attack
from detection_rules.beats import parse_beats_from_index
@@ -19,14 +21,13 @@ from detection_rules.rule import (QueryRuleData, TOMLRuleContents,
load_integrations_manifests)
from detection_rules.rule_loader import FILE_PATTERN
from detection_rules.schemas import definitions
from detection_rules.semver import Version
from detection_rules.utils import INTEGRATION_RULE_DIR, get_path, load_etc_dump
from detection_rules.version_lock import default_version_lock
from rta import get_available_tests
from .base import BaseRuleTest
PACKAGE_STACK_VERSION = Version(current_stack_version()) + (0,)
PACKAGE_STACK_VERSION = Version.parse(current_stack_version(), optional_minor_and_patch=True)
class TestValidRules(BaseRuleTest):
@@ -426,19 +427,18 @@ class TestRuleMetadata(BaseRuleTest):
# f'Re-add to the deprecated folder and update maturity to "deprecated": \n {missing_rule_strings}'
# self.assertEqual([], missing_rules, err_msg)
stack_version = Version(current_stack_version())
for rule_id, entry in deprecations.items():
# if a rule is deprecated and not backported in order to keep the rule active in older branches, then it
# will exist in the deprecated_rules.json file and not be in the _deprecated folder - this is expected.
# However, that should not occur except by exception - the proper way to handle this situation is to
# "fork" the existing rule by adding a new min_stack_version.
if stack_version < Version(entry['stack_version']):
if PACKAGE_STACK_VERSION < Version.parse(entry['stack_version'], optional_minor_and_patch=True):
continue
rule_str = f'{rule_id} - {entry["rule_name"]} ->'
self.assertIn(rule_id, deprecated_rules, f'{rule_str} is logged in "deprecated_rules.json" but is missing')
@unittest.skipIf(PACKAGE_STACK_VERSION < Version("8.3.0"),
@unittest.skipIf(PACKAGE_STACK_VERSION < Version.parse("8.3.0"),
"Test only applicable to 8.3+ stacks regarding related integrations build time field.")
def test_integration_tag(self):
"""Test integration rules defined by metadata tag."""
@@ -617,7 +617,8 @@ class TestRuleTiming(BaseRuleTest):
has_event_ingested = rule.contents.data.timestamp_override == 'event.ingested'
indexes = rule.contents.data.get('index', [])
beats_indexes = parse_beats_from_index(indexes)
min_stack_is_less_than_82 = Version(rule.contents.metadata.min_stack_version or '7.13') < (8, 2)
min_stack_is_less_than_82 = Version.parse(rule.contents.metadata.min_stack_version or '7.13.0') \
< Version.parse("8.2.0")
config = rule.contents.data.get('note') or ''
rule_str = self.rule_str(rule, trailer=None)
@@ -752,7 +753,7 @@ class TestBuildTimeFields(BaseRuleTest):
def test_build_fields_min_stack(self):
"""Test that newly introduced build-time fields for a min_stack for applicable rules."""
current_stack_ver = Version(current_stack_version())
current_stack_ver = PACKAGE_STACK_VERSION
invalids = []
for rule in self.production_rules:
@@ -763,7 +764,7 @@ class TestBuildTimeFields(BaseRuleTest):
for build_field, field_versions in build_fields.items():
start_ver, end_ver = field_versions
if start_ver is not None and current_stack_ver >= start_ver:
if min_stack is None or not Version(min_stack) >= start_ver:
if min_stack is None or not Version.parse(min_stack) >= start_ver:
errors.append(f'{build_field} >= {start_ver}')
if errors:
+9 -9
View File
@@ -7,13 +7,13 @@
import copy
import unittest
import uuid
from semver import Version
import eql
from detection_rules import utils
from detection_rules.misc import load_current_package_version
from detection_rules.rule import TOMLRuleContents
from detection_rules.schemas import downgrade
from detection_rules.semver import Version
from detection_rules.version_lock import VersionLockFile
from marshmallow import ValidationError
@@ -99,7 +99,7 @@ class TestSchemas(unittest.TestCase):
def test_query_downgrade_7_x(self):
"""Downgrade a standard KQL rule."""
if Version(self.current_version) > (7,):
if Version.parse(self.current_version, optional_minor_and_patch=True).major > 7:
return
self.assertDictEqual(downgrade(self.v711_kql, "7.11"), self.v711_kql)
@@ -120,7 +120,7 @@ class TestSchemas(unittest.TestCase):
def test_versioned_downgrade_7_x(self):
"""Downgrade a KQL rule with version information"""
if Version(self.current_version) > (7,):
if Version.parse(self.current_version, optional_minor_and_patch=True).major > 7:
return
api_contents = self.v79_kql
@@ -138,7 +138,7 @@ class TestSchemas(unittest.TestCase):
def test_threshold_downgrade_7_x(self):
"""Downgrade a threshold rule that was first introduced in 7.9."""
if Version(self.current_version) > (7,):
if Version.parse(self.current_version, optional_minor_and_patch=True).major > 7:
return
api_contents = self.v712_threshold_rule
@@ -167,17 +167,17 @@ class TestSchemas(unittest.TestCase):
def test_query_downgrade_8_x(self):
"""Downgrade a standard KQL rule."""
if Version(self.current_version) > (8,):
if Version.parse(self.current_version, optional_minor_and_patch=True).major > 8:
return
def test_versioned_downgrade_8_x(self):
"""Downgrade a KQL rule with version information"""
if Version(self.current_version) > (8,):
if Version.parse(self.current_version, optional_minor_and_patch=True).major > 8:
return
def test_threshold_downgrade_8_x(self):
"""Downgrade a threshold rule that was first introduced in 7.9."""
if Version(self.current_version) > (8,):
if Version.parse(self.current_version, optional_minor_and_patch=True).major > 7:
return
def test_eql_validation(self):
@@ -285,7 +285,7 @@ class TestVersions(unittest.TestCase):
def test_stack_schema_map(self):
"""Test to ensure that an entry exists in the stack-schema-map for the current package version."""
package_version = Version(load_current_package_version())
package_version = Version.parse(load_current_package_version(), optional_minor_and_patch=True)
stack_map = utils.load_etc_dump('stack-schema-map.yaml')
err_msg = f'There is no entry defined for the current package ({package_version}) in the stack-schema-map'
self.assertIn(package_version, [Version(v)[:2] for v in stack_map], err_msg)
self.assertIn(package_version, [Version.parse(v) for v in stack_map], err_msg)
+5 -4
View File
@@ -7,8 +7,9 @@
import unittest
from semver import Version
from detection_rules.schemas import get_min_supported_stack_version
from detection_rules.semver import Version
from detection_rules.version_lock import default_version_lock
@@ -18,11 +19,11 @@ class TestVersionLock(unittest.TestCase):
def test_previous_entries_gte_current_min_stack(self):
"""Test that all previous entries for all locks in the version lock are >= the current min_stack."""
errors = {}
min_version = get_min_supported_stack_version(drop_patch=True)
min_version = get_min_supported_stack_version()
for rule_id, lock in default_version_lock.version_lock.to_dict().items():
if 'previous' in lock:
prev_vers = [Version(v) for v in list(lock['previous'])]
outdated = [str(v) for v in prev_vers if v < min_version]
prev_vers = [Version.parse(v, optional_minor_and_patch=True) for v in list(lock['previous'])]
outdated = [str(v).lstrip(".0") for v in prev_vers if v < min_version]
if outdated:
errors[rule_id] = outdated