[Bug] CLI Fixes (#1073)
* add support for self-signed certs in es and kibana
* allow Kibana to auth against any providerType
* fix export-rules command
* fix kibana upload-rule command
* fix view-rule command
* fix validate-rule command
* fix search-rules command
* fix dev kibana-diff command
* fix dev package-stats command
* fix dev search-rule-prs command
* fix dev deprecate-rule command
* replace toml with pytoml to fix import-rules command
* use no_verify in get_kibana_client
* use Path for rule-file type in view-rule
* update schemas to resolve additionalProperties type bug
* fix missing unique_fields in package rule filter
* fix github pr loader
* Load gh rules as TOMLRule instead of dict
* remove unnecessary version insertion
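Taken together, the new client flags let the CLI talk to a self-signed stack and authenticate against a non-default Kibana provider. A usage sketch (URL, credentials, provider values, and the rule path are illustrative, not taken from this commit):

    # skip TLS verification against a self-signed Kibana, then authenticate via a named provider
    python -m detection_rules kibana --kibana-url https://localhost:5601 \
        --ignore-ssl-errors --provider-type basic --provider-name cloud-basic \
        upload-rule -f rules/example_rule.toml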
+41 -32
@@ -15,7 +15,7 @@ import textwrap
 import time
 import typing
 from pathlib import Path
-from typing import Optional, Tuple, List
+from typing import Dict, Optional, Tuple, List

 import click
 import yaml
@@ -32,6 +32,7 @@ from .packaging import PACKAGE_FILE, Package, RELEASE_DIR, current_stack_version
 from .version_lock import manage_versions, load_versions
 from .rule import AnyRuleData, BaseRuleData, QueryRuleData, TOMLRule
 from .rule_loader import RuleCollection, production_filter
+from .schemas import definitions
 from .semver import Version
 from .utils import dict_hash, get_path, load_dump
@@ -212,8 +213,6 @@ def kibana_diff(rule_id, repo, branch, threads):
     else:
         rules = rules.filter(production_filter).id_map

-    # add versions to the rules
-    manage_versions(list(rules.values()), verbose=False)
     repo_hashes = {r.id: r.contents.sha256(include_version=True) for r in rules.values()}

     kibana_rules = {r['rule_id']: r for r in get_kibana_rules(repo=repo, branch=branch, threads=threads).values()}
@@ -594,32 +593,39 @@ def search_rule_prs(ctx, no_loop, query, columns, language, token, threads):
     from uuid import uuid4
     from .main import search_rules

-    all_rules = {}
+    all_rules: Dict[Path, TOMLRule] = {}
     new, modified, errors = rule_loader.load_github_pr_rules(token=token, threads=threads)

-    def add_github_meta(this_rule, status, original_rule_id=None):
+    def add_github_meta(this_rule: TOMLRule, status: str, original_rule_id: Optional[definitions.UUIDString] = None):
         pr = this_rule.gh_pr
-        rule.metadata['status'] = status
-        rule.metadata['github'] = {
-            'base': pr.base.label,
-            'comments': [c.body for c in pr.get_comments()],
-            'commits': pr.commits,
-            'created_at': str(pr.created_at),
-            'head': pr.head.label,
-            'is_draft': pr.draft,
-            'labels': [lbl.name for lbl in pr.get_labels()],
-            'last_modified': str(pr.last_modified),
-            'title': pr.title,
-            'url': pr.html_url,
-            'user': pr.user.login
-        }
+        data = rule.contents.data
+        extend_meta = {
+            'status': status,
+            'github': {
+                'base': pr.base.label,
+                'comments': [c.body for c in pr.get_comments()],
+                'commits': pr.commits,
+                'created_at': str(pr.created_at),
+                'head': pr.head.label,
+                'is_draft': pr.draft,
+                'labels': [lbl.name for lbl in pr.get_labels()],
+                'last_modified': str(pr.last_modified),
+                'title': pr.title,
+                'url': pr.html_url,
+                'user': pr.user.login
+            }
+        }

         if original_rule_id:
-            rule.metadata['original_rule_id'] = original_rule_id
-            rule.contents['rule_id'] = str(uuid4())
+            extend_meta['original_rule_id'] = original_rule_id
+            data = dataclasses.replace(rule.contents.data, rule_id=str(uuid4()))

-        rule_path = f'pr-{pr.number}-{rule.path}'
-        all_rules[rule_path] = rule.rule_format()
+        rule_path = Path(f'pr-{pr.number}-{rule.path}')
+        new_meta = dataclasses.replace(rule.contents.metadata, extended=extend_meta)
+        contents = dataclasses.replace(rule.contents, metadata=new_meta, data=data)
+        new_rule = TOMLRule(path=rule_path, contents=contents)
+
+        all_rules[new_rule.path] = new_rule

     for rule_id, rule in new.items():
         add_github_meta(rule, 'new')
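The rework stops mutating rule dicts in place: rule contents are frozen dataclasses, and updated copies come from dataclasses.replace. The pattern in miniature (Meta here is a stand-in class, not the repo's):

    import dataclasses

    @dataclasses.dataclass(frozen=True)
    class Meta:
        maturity: str = 'production'
        extended: dict = None

    meta = Meta()
    updated = dataclasses.replace(meta, extended={'status': 'new'})
    assert meta.extended is None  # the original object is untouched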
@@ -638,32 +644,35 @@ def search_rule_prs(ctx, no_loop, query, columns, language, token, threads):


 @dev_group.command('deprecate-rule')
-@click.argument('rule-file', type=click.Path(dir_okay=False))
+@click.argument('rule-file', type=Path)
 @click.pass_context
-def deprecate_rule(ctx: click.Context, rule_file: str):
+def deprecate_rule(ctx: click.Context, rule_file: Path):
     """Deprecate a rule."""
-    import pytoml
-
     version_info = load_versions()
-    rule_file = Path(rule_file)
-    contents = pytoml.loads(rule_file.read_text())
+    rule_collection = RuleCollection()
+    contents = rule_collection.load_file(rule_file).contents
     rule = TOMLRule(path=rule_file, contents=contents)

-    if rule.id not in version_info:
+    if rule.contents.id not in version_info:
         click.echo('Rule has not been version locked and so does not need to be deprecated. '
                    'Delete the file or update the maturity to `development` instead')
         ctx.exit()

     today = time.strftime('%Y/%m/%d')

-    new_meta = {
-        'updated_date': today,
-        'deprecation_date': today,
-        'maturity': 'deprecated'
-    }
+    new_meta = dataclasses.replace(rule.contents.metadata,
+                                   updated_date=today,
+                                   deprecation_date=today,
+                                   maturity='deprecated')
+    contents = dataclasses.replace(rule.contents, metadata=new_meta)
     deprecated_path = get_path('rules', '_deprecated', rule_file.name)

     # create the new rule and save it
     new_rule = TOMLRule(contents=contents, path=Path(deprecated_path))
     new_rule.save_toml()
@@ -39,7 +39,6 @@ def kibana_group(ctx: click.Context, **kibana_kwargs):
 @click.pass_context
 def upload_rule(ctx, rules, replace_id):
     """Upload a list of rule .toml files to Kibana."""
-
     kibana = ctx.obj['kibana']
     api_payloads = []

@@ -60,8 +59,22 @@ def upload_rule(ctx, rules, replace_id):
         api_payloads.append(rule)

     with kibana:
-        rules = RuleResource.bulk_create(api_payloads)
-        click.echo(f"Successfully uploaded {len(rules)} rules")
+        results = RuleResource.bulk_create(api_payloads)
+
+    success = []
+    errors = []
+    for result in results:
+        if 'error' in result:
+            errors.append(f'{result["rule_id"]} - {result["error"]["message"]}')
+        else:
+            success.append(result['rule_id'])
+
+    if success:
+        click.echo('Successful uploads:\n - ' + '\n - '.join(success))
+    if errors:
+        click.echo('Failed uploads:\n - ' + '\n - '.join(errors))
+
+    return results


 @kibana_group.command('search-alerts')
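Instead of assuming every upload succeeds, the command now partitions the bulk response. A minimal sketch of that logic; the response shape (a dict per rule, with an error object on failure) is inferred from the code above, not documented by this commit:

    results = [
        {'rule_id': '16fac1ca-...', 'name': 'Rule A'},
        {'rule_id': '99dcf974-...', 'error': {'status_code': 409, 'message': 'rule_id already exists'}},
    ]
    success = [r['rule_id'] for r in results if 'error' not in r]
    errors = [f"{r['rule_id']} - {r['error']['message']}" for r in results if 'error' in r]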
+56 -18
@@ -12,7 +12,7 @@ import re
 import time
 from datetime import datetime
 from pathlib import Path
-from typing import Dict
+from typing import Dict, Optional
 from uuid import uuid4

 import click
@@ -22,7 +22,7 @@ from .misc import add_client, client_error, nested_set, parse_config
 from .rule import TOMLRule, TOMLRuleContents
 from .rule_formatter import toml_write
 from .rule_loader import RuleCollection
-from .schemas import all_versions
+from .schemas import all_versions, definitions
 from .utils import get_path, get_etc_path, clear_caches, load_dump, load_rule_contents

 RULES_DIR = get_path('rules')
@@ -41,7 +41,7 @@ def root(ctx, debug):


 @root.command('create-rule')
-@click.argument('path', type=click.Path(dir_okay=False))
+@click.argument('path', type=Path)
 @click.option('--config', '-c', type=click.Path(exists=True, dir_okay=False), help='Rule or config file')
 @click.option('--required-only', is_flag=True, help='Only prompt for required fields')
 @click.option('--rule-type', '-t', type=click.Choice(sorted(TOMLRuleContents.all_rule_types())),
@@ -95,7 +95,7 @@ def import_rules(input_file, directory):

     rule_contents = []
     for rule_file in rule_files:
-        rule_contents.extend(load_rule_contents(rule_file))
+        rule_contents.extend(load_rule_contents(Path(rule_file)))

     if not rule_contents:
         click.echo('Must specify at least one file!')
@@ -156,7 +156,7 @@ def mass_update(ctx, query, metadata, language, field):


 @root.command('view-rule')
-@click.argument('rule-file')
+@click.argument('rule-file', type=Path)
 @click.option('--api-format/--rule-format', default=True, help='Print the rule in final api or rule format')
 @click.pass_context
 def view_rule(ctx, rule_file, api_format):
@@ -168,21 +168,57 @@ def view_rule(ctx, rule_file, api_format):
     else:
         click.echo(toml_write(rule.contents.to_dict()))

     return rule


+def _export_rules(rules: RuleCollection, outfile: Path, downgrade_version: Optional[definitions.SemVer] = None,
+                  verbose=True, skip_unsupported=False):
+    """Export rules into a consolidated ndjson file."""
+    from .rule import downgrade_contents_from_rule
+
+    outfile = outfile.with_suffix('.ndjson')
+    unsupported = []
+
+    if downgrade_version:
+        if skip_unsupported:
+            output_lines = []
+
+            for rule in rules:
+                try:
+                    output_lines.append(json.dumps(downgrade_contents_from_rule(rule, downgrade_version),
+                                                   sort_keys=True))
+                except ValueError as e:
+                    unsupported.append(f'{e}: {rule.id} - {rule.name}')
+                    continue
+
+        else:
+            output_lines = [json.dumps(downgrade_contents_from_rule(r, downgrade_version), sort_keys=True)
+                            for r in rules]
+    else:
+        output_lines = [json.dumps(r.contents.to_api_format(), sort_keys=True) for r in rules]
+
+    outfile.write_text('\n'.join(output_lines) + '\n')
+
+    if verbose:
+        click.echo(f'Exported {len(rules) - len(unsupported)} rules into {outfile}')
+
+        if skip_unsupported and unsupported:
+            unsupported_str = '\n- '.join(unsupported)
+            click.echo(f'Skipped {len(unsupported)} unsupported rules: \n- {unsupported_str}')
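A sketch of driving the new helper directly, assuming a checked-out repo (paths and the target version are illustrative):

    from pathlib import Path

    from detection_rules.main import _export_rules
    from detection_rules.rule_loader import RuleCollection

    rules = RuleCollection.default()
    _export_rules(rules, outfile=Path('exports/all-rules.ndjson'),
                  downgrade_version='7.9', skip_unsupported=True)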
 @root.command('export-rules')
 @multi_collection
-@click.option('--outfile', '-o', default=get_path('exports', f'{time.strftime("%Y%m%dT%H%M%SL")}.ndjson'),
-              type=click.Path(dir_okay=False), help='Name of file for exported rules')
+@click.option('--outfile', '-o', default=Path(get_path('exports', f'{time.strftime("%Y%m%dT%H%M%SL")}.ndjson')),
+              type=Path, help='Name of file for exported rules')
 @click.option('--replace-id', '-r', is_flag=True, help='Replace rule IDs with new IDs before export')
 @click.option('--stack-version', type=click.Choice(all_versions()),
               help='Downgrade a rule version to be compatible with older instances of Kibana')
 @click.option('--skip-unsupported', '-s', is_flag=True,
               help='If `--stack-version` is passed, skip rule types which are unsupported '
                    '(an error will be raised otherwise)')
-def export_rules(rules, outfile, replace_id, stack_version, skip_unsupported) -> RuleCollection:
+def export_rules(rules, outfile: Path, replace_id, stack_version, skip_unsupported) -> RuleCollection:
     """Export rule(s) into an importable ndjson file."""
-    from .packaging import Package
-
     assert len(rules) > 0, "No rules found"

     if replace_id:
@@ -196,10 +232,11 @@ def export_rules(rules, outfile, replace_id, stack_version, skip_unsupported) -> RuleCollection:
         new_contents = dataclasses.replace(rule.contents, data=new_data)
         rules.add_rule(TOMLRule(contents=new_contents))

-    Path(outfile).parent.mkdir(exist_ok=True)
-    package = Package(rules, '_', verbose=False)
-    package.export(outfile, downgrade_version=stack_version, skip_unsupported=skip_unsupported)
-    return package.rules
+    outfile.parent.mkdir(exist_ok=True)
+    _export_rules(rules=rules, outfile=outfile, downgrade_version=stack_version,
+                  skip_unsupported=skip_unsupported)
+
+    return rules
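From the command line the same flow might look like this (version string and output path are illustrative):

    python -m detection_rules export-rules --stack-version 7.9 --skip-unsupported \
        -o exports/rules-7.9.ndjson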
 @root.command('validate-rule')
@@ -231,13 +268,14 @@ def search_rules(query, columns, language, count, verbose=True, rules: Dict[str, TOMLRule] = None, pager=False):
     from eql.build import get_engine
     from eql import parse_query
     from eql.pipes import CountPipe
+    from .rule import get_unique_query_fields

     flattened_rules = []
     rules = rules or {str(rule.path): rule for rule in RuleCollection.default()}

-    for file_name, rule_doc in rules.items():
+    for file_name, rule in rules.items():
         flat: dict = {"file": os.path.relpath(file_name)}
-        flat.update(rule_doc.contents.to_dict())
+        flat.update(rule.contents.to_dict())
         flat.update(flat["metadata"])
         flat.update(flat["rule"])

@@ -254,8 +292,8 @@ def search_rules(query, columns, language, count, verbose=True, rules: Dict[str, TOMLRule] = None, pager=False):
         technique_ids.extend([t['id'] for t in techniques])
         subtechnique_ids.extend([st['id'] for t in techniques for st in t.get('subtechnique', [])])

-        flat.update(techniques=technique_ids, tactics=tactic_names, subtechniques=subtechnique_ids)
-        # unique_fields=TOMLRule.get_unique_query_fields(rule_doc['rule']))
+        flat.update(techniques=technique_ids, tactics=tactic_names, subtechniques=subtechnique_ids,
+                    unique_fields=get_unique_query_fields(rule))
         flattened_rules.append(flat)

     flattened_rules.sort(key=lambda dct: dct["name"])
+24 -6
@@ -283,6 +283,7 @@ def get_elasticsearch_client(cloud_id=None, elasticsearch_url=None, es_user=None, es_password=None, ctx=None, **kwargs):
     es_password = es_password or click.prompt("es_password", hide_input=True)
     hosts = [elasticsearch_url] if elasticsearch_url else None
     timeout = kwargs.pop('timeout', 60)
+    kwargs['verify_certs'] = not kwargs.pop('ignore_ssl_errors', False)

     try:
         client = Elasticsearch(hosts=hosts, cloud_id=cloud_id, http_auth=(es_user, es_password), timeout=timeout,
@@ -295,8 +296,10 @@ def get_elasticsearch_client(cloud_id=None, elasticsearch_url=None, es_user=None, es_password=None, ctx=None, **kwargs):
         client_error(error_msg, e, ctx=ctx, err=True)


-def get_kibana_client(cloud_id, kibana_url, kibana_user, kibana_password, kibana_cookie, **kwargs):
+def get_kibana_client(cloud_id, kibana_url, kibana_user, kibana_password, kibana_cookie, space, ignore_ssl_errors,
+                      provider_type, provider_name, **kwargs):
     """Get an authenticated Kibana client."""
+    from requests import HTTPError
     from kibana import Kibana

     if not (cloud_id or kibana_url):
@@ -307,11 +310,22 @@ def get_kibana_client(cloud_id, kibana_url, kibana_user, kibana_password, kibana_cookie, **kwargs):
         kibana_user = kibana_user or click.prompt("kibana_user")
         kibana_password = kibana_password or click.prompt("kibana_password", hide_input=True)

-    with Kibana(cloud_id=cloud_id, kibana_url=kibana_url, **kwargs) as kibana:
+    verify = not ignore_ssl_errors
+
+    with Kibana(cloud_id=cloud_id, kibana_url=kibana_url, space=space, verify=verify, **kwargs) as kibana:
         if kibana_cookie:
             kibana.add_cookie(kibana_cookie)
-        else:
-            kibana.login(kibana_user, kibana_password)
-    return kibana
+            return kibana
+
+        try:
+            kibana.login(kibana_user, kibana_password, provider_type=provider_type, provider_name=provider_name)
+        except HTTPError as exc:
+            if exc.response.status_code == 401:
+                err_msg = f'Authentication failed for {kibana_url}. If credentials are valid, check --provider-name'
+                client_error(err_msg, exc, err=True)
+            else:
+                raise
+
+        return kibana
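A sketch of the new signature in use (endpoint and credentials illustrative):

    from detection_rules.misc import get_kibana_client

    kibana = get_kibana_client(cloud_id=None, kibana_url='https://localhost:5601',
                               kibana_user='elastic', kibana_password='changeme',
                               kibana_cookie=None, space=None, ignore_ssl_errors=True,
                               provider_type='basic', provider_name='basic')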
@@ -323,14 +337,18 @@ client_options = {
         'kibana_password': click.Option(['--kibana-password', '-kp'], default=getdefault('kibana_password')),
         'kibana_url': click.Option(['--kibana-url'], default=getdefault('kibana_url')),
         'kibana_user': click.Option(['--kibana-user', '-ku'], default=getdefault('kibana_user')),
-        'space': click.Option(['--space'], default=None, help='Kibana space')
+        'provider_type': click.Option(['--provider-type'], default=getdefault('provider_type')),
+        'provider_name': click.Option(['--provider-name'], default=getdefault('provider_name')),
+        'space': click.Option(['--space'], default=None, help='Kibana space'),
+        'ignore_ssl_errors': click.Option(['--ignore-ssl-errors'], default=getdefault('ignore_ssl_errors'))
     },
     'elasticsearch': {
         'cloud_id': click.Option(['--cloud-id'], default=getdefault("cloud_id")),
         'elasticsearch_url': click.Option(['--elasticsearch-url'], default=getdefault("elasticsearch_url")),
         'es_user': click.Option(['--es-user', '-eu'], default=getdefault("es_user")),
         'es_password': click.Option(['--es-password', '-ep'], default=getdefault("es_password")),
-        'timeout': click.Option(['--timeout', '-et'], default=60, help='Timeout for elasticsearch client')
+        'timeout': click.Option(['--timeout', '-et'], default=60, help='Timeout for elasticsearch client'),
+        'ignore_ssl_errors': click.Option(['--ignore-ssl-errors'], default=getdefault('ignore_ssl_errors'))
     }
 }
 kibana_options = list(client_options['kibana'].values())
@@ -20,7 +20,6 @@ import yaml

 from .misc import JS_LICENSE, cached
 from .rule import TOMLRule, QueryRuleData, ThreatMapping
-from .rule import downgrade_contents_from_rule
 from .rule_loader import DeprecatedCollection, RuleCollection, DEFAULT_RULES_DIR
 from .schemas import definitions
 from .utils import Ndjson, get_path, get_etc_path, load_etc_dump
@@ -55,10 +54,15 @@ def filter_rule(rule: TOMLRule, config_filter: dict, exclude_fields: Optional[dict] = None) -> bool:
             return False

     exclude_fields = exclude_fields or {}
-    for index, fields in exclude_fields.items():
-        if rule.contents.data.unique_fields and (rule.contents.data.index == index or index == 'any'):
-            if set(rule.contents.data.unique_fields) & set(fields):
-                return False
+    if exclude_fields:
+        from .rule import get_unique_query_fields
+
+        unique_fields = get_unique_query_fields(rule)
+
+        for index, fields in exclude_fields.items():
+            if unique_fields and (rule.contents.data.index == index or index == 'any'):
+                if set(unique_fields) & set(fields):
+                    return False

     return True
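The exclude_fields mapping is keyed by index pattern, with 'any' matching every index; a rule is filtered out when its query touches one of the listed fields. An illustrative value (field and index names are examples):

    exclude_fields = {
        'any': ['host.os.name'],             # drop rules that query this field on any index
        'winlogbeat-*': ['event.provider'],  # or only when the rule targets this index
    }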
@@ -198,35 +202,10 @@ class Package(object):

     def export(self, outfile, downgrade_version=None, verbose=True, skip_unsupported=False):
         """Export rules into a consolidated ndjson file."""
-        outfile = Path(outfile).with_suffix('.ndjson')
-        unsupported = []
-
-        if downgrade_version:
-            if skip_unsupported:
-                output_lines = []
-
-                for rule in self.rules:
-                    try:
-                        output_lines.append(json.dumps(downgrade_contents_from_rule(rule, downgrade_version),
-                                                       sort_keys=True))
-                    except ValueError as e:
-                        unsupported.append(f'{e}: {rule.id} - {rule.name}')
-                        continue
-
-            else:
-                output_lines = [json.dumps(downgrade_contents_from_rule(r, downgrade_version), sort_keys=True)
-                                for r in self.rules]
-        else:
-            output_lines = [json.dumps(r.contents.data.to_dict(), sort_keys=True) for r in self.rules]
-
-        outfile.write_text('\n'.join(output_lines) + '\n')
-
-        if verbose:
-            click.echo(f'Exported {len(self.rules) - len(unsupported)} rules into {outfile}')
-
-        if skip_unsupported and unsupported:
-            unsupported_str = '\n- '.join(unsupported)
-            click.echo(f'Skipped {len(unsupported)} unsupported rules: \n- {unsupported_str}')
+        from .main import _export_rules
+
+        _export_rules(self.rules, outfile=outfile, downgrade_version=downgrade_version, verbose=verbose,
+                      skip_unsupported=skip_unsupported)

     def get_package_hash(self, as_api=True, verbose=True):
         """Get hash of package contents."""
@@ -17,6 +17,7 @@ from uuid import uuid4

 import eql
 from marshmallow import ValidationError, validates_schema

 import kql
+from . import utils
 from .mixins import MarshmallowDataclassMixin
 from .rule_formatter import toml_write, nested_normalize
@@ -585,5 +586,18 @@ def downgrade_contents_from_rule(rule: TOMLRule, target_version: str) -> dict:
     return payload


+def get_unique_query_fields(rule: TOMLRule) -> List[str]:
+    """Get a list of unique fields used in a rule query from rule contents."""
+    contents = rule.contents.to_api_format()
+    language = contents.get('language')
+    query = contents.get('query')
+    if language in ('kuery', 'eql'):
+        # TODO: remove once py-eql supports ipv6 for cidrmatch
+        with eql.parser.elasticsearch_syntax, eql.parser.ignore_missing_functions:
+            parsed = kql.parse(query) if language == 'kuery' else eql.parse_query(query)
+
+        return sorted(set(str(f) for f in parsed if isinstance(f, (eql.ast.Field, kql.ast.Field))))
+
+
 # avoid a circular import
 from .rule_validators import KQLValidator, EQLValidator  # noqa: E402
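A rough sketch of what the helper extracts for a simple kuery, assuming the repo's kql package is on the path (the printed result is an expectation, not verified output):

    import kql

    parsed = kql.parse('process.name:"powershell.exe" and user.name:"SYSTEM"')
    fields = sorted(set(str(f) for f in parsed if isinstance(f, kql.ast.Field)))
    print(fields)  # expected: ['process.name', 'user.name']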
@@ -275,7 +275,7 @@ class RuleCollection(BaseCollection):

 @cached
 def load_github_pr_rules(labels: list = None, repo: str = 'elastic/detection-rules', token=None, threads=50,
-                         verbose=True):
+                         verbose=True) -> (Dict[str, TOMLRule], Dict[str, TOMLRule], Dict[str, list]):
     """Load all rules active as a GitHub PR."""
     import requests
     import pytoml
@@ -303,7 +303,8 @@ def load_github_pr_rules(labels: list = None, repo: str = 'elastic/detection-rules', token=None, threads=50,
         response = requests.get(rule_file.raw_url)
         try:
             raw_rule = pytoml.loads(response.text)
-            rule = TOMLRule(rule_file.filename, raw_rule)
+            contents = TOMLRuleContents.from_dict(raw_rule)
+            rule = TOMLRule(path=rule_file.filename, contents=contents)
             rule.gh_pr = pull

             if rule in existing_rules:
@@ -323,11 +324,11 @@ def load_github_pr_rules(labels: list = None, repo: str = 'elastic/detection-rules', token=None, threads=50,
     pool.close()
     pool.join()

-    new = OrderedDict([(rule.id, rule) for rule in sorted(new_rules, key=lambda r: r.name)])
+    new = OrderedDict([(rule.contents.id, rule) for rule in sorted(new_rules, key=lambda r: r.contents.name)])
     modified = OrderedDict()

-    for modified_rule in sorted(modified_rules, key=lambda r: r.name):
-        modified.setdefault(modified_rule.id, []).append(modified_rule)
+    for modified_rule in sorted(modified_rules, key=lambda r: r.contents.name):
+        modified.setdefault(modified_rule.contents.id, []).append(modified_rule)

     return new, modified, errors
+18 -14
|
||||
from typing import Dict, Union, Optional, Callable
|
||||
|
||||
import click
|
||||
import pytoml
|
||||
import eql.utils
|
||||
from eql.utils import load_dump, stream_json_lines
|
||||
|
||||
@@ -271,30 +272,33 @@ def clear_caches():
|
||||
_cache.clear()
|
||||
|
||||
|
||||
def load_rule_contents(rule_file: str, single_only=False) -> list:
|
||||
def load_rule_contents(rule_file: Path, single_only=False) -> list:
|
||||
"""Load a rule file from multiple formats."""
|
||||
_, extension = os.path.splitext(rule_file)
|
||||
raw_text = rule_file.read_text()
|
||||
|
||||
if extension in ('.ndjson', '.jsonl'):
|
||||
# kibana exported rule object is ndjson with the export metadata on the last line
|
||||
with open(rule_file, 'r') as f:
|
||||
contents = [json.loads(line) for line in f.readlines()]
|
||||
contents = [json.loads(line) for line in raw_text.splitlines()]
|
||||
|
||||
if len(contents) > 1 and 'exported_count' in contents[-1]:
|
||||
contents.pop(-1)
|
||||
if len(contents) > 1 and 'exported_count' in contents[-1]:
|
||||
contents.pop(-1)
|
||||
|
||||
if single_only and len(contents) > 1:
|
||||
raise ValueError('Multiple rules not allowed')
|
||||
if single_only and len(contents) > 1:
|
||||
raise ValueError('Multiple rules not allowed')
|
||||
|
||||
return contents or [{}]
|
||||
return contents or [{}]
|
||||
elif extension == '.toml':
|
||||
rule = pytoml.loads(raw_text)
|
||||
else:
|
||||
rule = load_dump(rule_file)
|
||||
if isinstance(rule, dict):
|
||||
return [rule]
|
||||
elif isinstance(rule, list):
|
||||
return rule
|
||||
else:
|
||||
raise ValueError(f"Expected a list or dictionary in {rule_file}")
|
||||
|
||||
if isinstance(rule, dict):
|
||||
return [rule]
|
||||
elif isinstance(rule, list):
|
||||
return rule
|
||||
else:
|
||||
raise ValueError(f"Expected a list or dictionary in {rule_file}")
|
||||
|
||||
|
||||
def format_command_options(ctx):
|
||||
|
||||
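Usage sketch for the reworked loader (file names illustrative):

    from pathlib import Path

    from detection_rules.utils import load_rule_contents

    # a Kibana export: ndjson rules, with the export-count object stripped from the last line
    exported = load_rule_contents(Path('exports/20210326T105000L.ndjson'))

    # a single TOML rule, now parsed with pytoml
    rule = load_rule_contents(Path('rules/windows/example_rule.toml'), single_only=True)[0]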
The remaining hunks make one identical schema fix, repeated across each stack-versioned API schema file: the additionalProperties of every "filters" items object, "threat_filters" items object, and "meta" object is widened from string-only to any JSON type. The affected hunks are @@ -38,7 +38,13 @@ (in eight files), @@ -63,7 +69,13 @@ (twice), @@ -72,7 +78,13 @@, @@ -42,7 +42,13 @@ (twice), @@ -70,7 +76,13 @@, @@ -77,7 +83,13 @@ (four times), @@ -44,7 +44,13 @@ (twice), @@ -89,7 +95,13 @@ (twice), @@ -276,7 +288,13 @@, @@ -75,7 +81,13 @@, @@ -80,7 +86,13 @@, and @@ -285,7 +297,13 @@, 26 hunks in all; each applies the same change:

     "additionalProperties": {
-      "type": "string"
+      "type": [
+        "string",
+        "number",
+        "object",
+        "array",
+        "boolean"
+      ]
     },
     "type": "object"
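For context, a typical Kibana filter's meta object mixes value types, which the old string-only additionalProperties rejected during validation. An illustrative value, written as a Python literal (the field values are examples):

    meta = {
        'negate': False,             # boolean, not a string
        'disabled': False,
        'params': {'query': 'up'},   # nested object
        'key': 'monitor.status',
    }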
+22 -3
@@ -26,6 +26,7 @@ class Kibana(object):
         self.authenticated = False
         self.session = requests.Session()
         self.session.verify = verify
+        self.verify = verify

         self.cloud_id = cloud_id
         self.kibana_url = kibana_url.rstrip('/') if kibana_url else None
@@ -33,6 +34,9 @@ class Kibana(object):
         self.space = space if space and space.lower() != 'default' else None
         self.status = None

+        self.provider_name = None
+        self.provider_type = None
+
         if self.cloud_id:
             self.cluster_name, cloud_info = self.cloud_id.split(":")
             self.domain, self.es_uuid, self.kibana_uuid = \
@@ -46,6 +50,9 @@ class Kibana(object):

             self.elastic_url = f"https://{self.es_uuid}.{self.domain}:9243"

+            self.provider_name = 'cloud-basic'
+            self.provider_type = 'basic'
+
         self.session.headers.update({'Content-Type': "application/json", "kbn-xsrf": str(uuid.uuid4())})
         self.elasticsearch = elasticsearch

@@ -111,7 +118,7 @@ class Kibana(object):
         """Perform an HTTP DELETE."""
         return self.request('DELETE', uri, params=params, error=error, **kwargs)

-    def login(self, kibana_username, kibana_password):
+    def login(self, kibana_username, kibana_password, provider_type=None, provider_name=None):
         """Authenticate to Kibana using the API to update our cookies."""
         payload = {'username': kibana_username, 'password': kibana_password}
         path = '/internal/security/login'
@@ -120,8 +127,19 @@ class Kibana(object):
             self.post(path, data=payload, error=True, verbose=False)
         except requests.HTTPError as e:
             # 7.10 changed the structure of the auth data
+            # providers dictated by Kibana configs in:
+            # https://www.elastic.co/guide/en/kibana/current/security-settings-kb.html#authentication-security-settings
+            # more details: https://discuss.elastic.co/t/kibana-7-10-login-issues/255201/2
             if e.response.status_code == 400 and '[undefined]' in e.response.text:
-                payload = {'params': payload, 'currentURL': '', 'providerType': 'basic', 'providerName': 'cloud-basic'}
+                provider_type = provider_type or self.provider_type or 'basic'
+                provider_name = provider_name or self.provider_name or 'basic'
+
+                payload = {
+                    'params': payload,
+                    'currentURL': '',
+                    'providerType': provider_type,
+                    'providerName': provider_name
+                }
                 self.post(path, data=payload, error=True)
             else:
                 raise
@@ -135,7 +153,8 @@ class Kibana(object):

         # create ES and force authentication
         if self.elasticsearch is None and self.elastic_url is not None:
-            self.elasticsearch = Elasticsearch(hosts=[self.elastic_url], http_auth=(kibana_username, kibana_password))
+            self.elasticsearch = Elasticsearch(hosts=[self.elastic_url], http_auth=(kibana_username, kibana_password),
+                                               verify_certs=self.verify)
             self.elasticsearch.info()

         # make chaining easier
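A sketch of the updated client against a self-signed dev stack (URL, space, and credentials are illustrative; cloud instances fall back to the cloud-basic/basic defaults set in __init__):

    from kibana import Kibana

    with Kibana(kibana_url='https://localhost:5601', space='security', verify=False) as kbn:
        kbn.login('elastic', 'changeme', provider_type='basic', provider_name='basic')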