From f8ede0e29b082bd5d9ac346f69886274adedee42 Mon Sep 17 00:00:00 2001
From: pyth0n1c <87383215+pyth0n1c@users.noreply.github.com>
Date: Wed, 12 Jul 2023 12:49:21 -0700
Subject: [PATCH] Update to fix some issues with app building and inspection. Test api_deploy mode for content.

---
 contentctl/actions/api_deploy.py        | 163 +++++++++++++-----------
 contentctl/contentctl.py                |  12 +-
 contentctl/objects/config.py            |  11 +-
 contentctl/output/conf_output.py        |   7 +-
 contentctl/output/templates/app.conf.j2 |   2 +-
 5 files changed, 108 insertions(+), 87 deletions(-)

diff --git a/contentctl/actions/api_deploy.py b/contentctl/actions/api_deploy.py
index 5f4b50d6..04d17f7b 100644
--- a/contentctl/actions/api_deploy.py
+++ b/contentctl/actions/api_deploy.py
@@ -34,83 +34,96 @@ def fix_newlines_in_conf_files(self, conf_path: pathlib.Path) -> RawConfigParser
         return parser
 
     def execute(self, input_dto: API_DeployInputDto) -> None:
-
-        splunk_args = {
-            "host": input_dto.config.deploy.server,
-            "port": 8089,
-            "username": input_dto.config.deploy.username,
-            "password": input_dto.config.deploy.password,
-            "owner": "nobody",
-            "app": input_dto.config.deploy.app,
-        }
-        service = client.connect(**splunk_args)
-
+        if len(input_dto.config.deployments.rest_api_deployments) == 0:
+            raise Exception("No rest_api_deployments defined in 'contentctl.yml'")
+        app_path = pathlib.Path(input_dto.config.build.path_root)/input_dto.config.build.name
+        if not app_path.is_dir():
+            raise Exception(f"The unpackaged app does not exist at the path {app_path}. Please run 'contentctl build' to generate the app.")
+        for target in input_dto.config.deployments.rest_api_deployments:
+            print(f"Deploying '{input_dto.config.build.name}' to target '{target.server}' [{target.description}]")
+            splunk_args = {
+                "host": target.server,
+                "port": target.port,
+                "username": target.username,
+                "password": target.password,
+                "owner": "nobody",
+                "app": "search",
+            }
+            print("Warning - we are currently deploying all content into the 'search' app. "
+                  "At this time, this means the user does not have to install the app "
+                  "manually, but this will change")
+            service = client.connect(**splunk_args)
+
+
+            macros_parser = self.fix_newlines_in_conf_files(
+                app_path/"default"/"macros.conf"
+            )
+            import tqdm
+
+            bar_format_macros = (
+                f"Deploying macros "
+                + "{percentage:3.0f}%[{bar:20}]"
+                + "[{n_fmt}/{total_fmt} | ETA: {remaining}]"
+            )
+            bar_format_detections = (
+                f"Deploying saved searches"
+                + "{percentage:3.0f}%[{bar:20}]"
+                + "[{n_fmt}/{total_fmt} | ETA: {remaining}]"
+            )
+            for section in tqdm.tqdm(
+                macros_parser.sections(), bar_format=bar_format_macros
+            ):
+                try:
+                    service.post("properties/macros", __stanza=section)
+                    service.post("properties/macros/" + section, **macros_parser[section])
+                    tqdm.tqdm.write(f"Deployed macro [{section}]")
+                except Exception as e:
+                    tqdm.tqdm.write(f"Error deploying macro {section}: {str(e)}")
+
+            detection_parser = RawConfigParser()
+            detection_parser = self.fix_newlines_in_conf_files(
+                app_path/"default"/"savedsearches.conf",
+            )
 
-        macros_parser = self.fix_newlines_in_conf_files(
-            pathlib.Path(input_dto.config.build.path_root)/input_dto.config.build.splunk_app.path/"default"/"macros.conf"
-        )
-        import tqdm
-
-        bar_format_macros = (
-            f"Deploying macros "
-            + "{percentage:3.0f}%[{bar:20}]"
-            + "[{n_fmt}/{total_fmt} | ETA: {remaining}]"
-        )
-        bar_format_detections = (
-            f"Deploying saved searches"
-            + "{percentage:3.0f}%[{bar:20}]"
-            + "[{n_fmt}/{total_fmt} | ETA: {remaining}]"
-        )
-        for section in tqdm.tqdm(
-            macros_parser.sections(), bar_format=bar_format_macros
-        ):
-            try:
-                service.post("properties/macros", __stanza=section)
-                service.post("properties/macros/" + section, **macros_parser[section])
-                # print("Deployed macro: " + section)
-            except Exception as e:
-                tqdm.tqdm.write(f"Error deploying macro {section}: {str(e)}")
-
-        detection_parser = RawConfigParser()
-        detection_parser = self.fix_newlines_in_conf_files(
-            pathlib.Path(input_dto.config.build.path_root)/input_dto.config.build.splunk_app.path/"default"/"savedsearches.conf",
-        )
-        try:
-            service.delete("saved/searches/MSCA - Anomalous usage of 7zip - Rule")
-        except Exception as e:
-            pass
-
-        for section in tqdm.tqdm(
-            detection_parser.sections(), bar_format=bar_format_detections
-        ):
-            try:
-                if section.startswith(input_dto.config.build.prefix):
-                    params = detection_parser[section]
-                    params["name"] = section
-                    response_actions = []
-                    if (
-                        input_dto.config.detection_configuration.notable
-                        and input_dto.config.detection_configuration.notable.rule_description
-                    ):
-                        response_actions.append("notable")
-                    if (
-                        input_dto.config.detection_configuration.rba
-                        and input_dto.config.detection_configuration.rba.enabled
-                    ):
-                        response_actions.append("risk")
-                    params["actions"] = ",".join(response_actions)
-                    params["request.ui_dispatch_app"] = "ES Content Updates"
-                    params["request.ui_dispatch_view"] = "ES Content Updates"
-                    params["alert_type"] = params.pop("counttype")
-                    params["alert_comparator"] = params.pop("relation")
-                    params["alert_threshold"] = params.pop("quantity")
-                    params.pop("enablesched")
-
-                    service.post("saved/searches", **params)
-                    # print("Deployed detection: " + params["name"])
-            except Exception as e:
-                tqdm.tqdm.write(f"Error deploying saved search {section}: {str(e)}")
+            for section in tqdm.tqdm(
+                detection_parser.sections(), bar_format=bar_format_detections
+            ):
+                try:
+                    if section.startswith(input_dto.config.build.prefix):
+                        params = detection_parser[section]
+                        params["name"] = section
+                        response_actions = []
+                        if (
+                            input_dto.config.detection_configuration.notable
+                            and input_dto.config.detection_configuration.notable.rule_description
+                        ):
+                            response_actions.append("notable")
+                        if (
+                            input_dto.config.detection_configuration.rba
+                            and input_dto.config.detection_configuration.rba.enabled
+                        ):
+                            response_actions.append("risk")
+                        params["actions"] = ",".join(response_actions)
+                        params["request.ui_dispatch_app"] = "ES Content Updates"
+                        params["request.ui_dispatch_view"] = "ES Content Updates"
+                        params["alert_type"] = params.pop("counttype")
+                        params["alert_comparator"] = params.pop("relation")
+                        params["alert_threshold"] = params.pop("quantity")
+                        params.pop("enablesched")
+
+                        try:
+                            service.saved_searches.delete(section)
+                            #tqdm.tqdm.write(f"Deleted old saved search: {section}")
+                        except Exception as e:
+                            #tqdm.tqdm.write(f"Error deleting savedsearch '{section}' :[{str(e)}]")
+                            pass
+
+                        service.post("saved/searches", **params)
+                        tqdm.tqdm.write(f"Deployed savedsearch [{section}]")
+
+                except Exception as e:
+                    tqdm.tqdm.write(f"Error deploying saved search {section}: {str(e)}")
 
         # story_parser = RawConfigParser()
         # story_parser.read(os.path.join(input_dto.path, input_dto.config.build.splunk_app.path, "default", "analyticstories.conf"))
diff --git a/contentctl/contentctl.py b/contentctl/contentctl.py
index 0e5fc7d7..1be7ab5b 100644
--- a/contentctl/contentctl.py
+++ b/contentctl/contentctl.py
@@ -125,12 +125,16 @@ def inspect(args) -> None:
     i.execute(input_dto=input_dto)
 
 
-def deploy(args) -> None:
+def api_deploy(args) -> None:
     config = start(args)
     deploy_input_dto = API_DeployInputDto(path=pathlib.Path(args.path), config=config)
     deploy = API_Deploy()
+    deploy.execute(deploy_input_dto)
 
+def acs_deploy(args) -> None:
+    config = start(args)
+    raise NotImplementedError("ACS Deploy is not yet implemented.")
 
 def test(args: argparse.Namespace):
     args = configure_unattended(args)
 
@@ -293,8 +297,8 @@ def main():
         "inspect",
         help="runs Splunk appinspect on a build Splunk app to ensure that an app meets Splunkbase requirements.",
     )
-    deploy_parser = actions_parser.add_parser(
-        "deploy", help="install an application on a target Splunk instance."
+    api_deploy_parser = actions_parser.add_parser(
+        "api_deploy", help="Deploy content via API to a target Splunk Instance."
     )
     docs_parser = actions_parser.add_parser(
         "docs", help="create documentation in docs folder"
@@ -334,7 +338,7 @@ def main():
     )
 
     inspect_parser.set_defaults(func=inspect)
-    deploy_parser.set_defaults(func=deploy)
+    api_deploy_parser.set_defaults(func=api_deploy)
 
     test_parser.add_argument(
         "--mode",
diff --git a/contentctl/objects/config.py b/contentctl/objects/config.py
index 1e1b4d82..d3cb6b77 100644
--- a/contentctl/objects/config.py
+++ b/contentctl/objects/config.py
@@ -61,6 +61,7 @@ class ConfigAlertAction(BaseModel):
 
 
 class ConfigDeploy(BaseModel):
+    description: str = "Description for this deployment target"
     server: str = "127.0.0.1"
 
 CREDENTIAL_MISSING = "PROVIDE_CREDENTIALS_VIA_CMD_LINE_ARGUMENT"
@@ -76,7 +77,7 @@ class ConfigDeployRestAPI(ConfigDeploy):
 
 
 class Deployments(BaseModel):
     acs_deployments: list[ConfigDeployACS] = []
-    rest_api_deployments: list[ConfigDeployRestAPI] = []
+    rest_api_deployments: list[ConfigDeployRestAPI] = [ConfigDeployRestAPI()]
 
 
@@ -94,9 +95,9 @@ class ConfigBuildBa(BaseModel):
 
 class ConfigBuild(BaseModel):
     # Fields required for app.conf based on
     # https://docs.splunk.com/Documentation/Splunk/9.0.4/Admin/Appconf
-    name: str = Field(default="Custom_Splunk_Content_Pack", title="The name for your Content Pack (app) ")
+    name: str = Field(default="ContentPack",title="Internal name used by your app. No spaces or special characters.")
     path_root: str = Field(default="dist",title="The root path at which you will build your app.")
-    prefix: str = Field(default="custom_prefix",title="A short prefix to easily identify all your content.")
+    prefix: str = Field(default="ContentPack",title="A short prefix to easily identify all your content.")
     build: int = Field(default=int(datetime.utcnow().strftime("%Y%m%d%H%M%S")), title="Build number for your app. This will always be a number that corresponds to the time of the build in the format YYYYMMDDHHMMSS")
     version: str = Field(default="0.0.1",title="The version of your Content Pack. This must follow semantic versioning guidelines.")
@@ -115,8 +116,8 @@ class ConfigBuild(BaseModel):
     # * must not be any of the following names: CON, PRN, AUX, NUL,
     #   COM1, COM2, COM3, COM4, COM5, COM6, COM7, COM8, COM9,
     #   LPT1, LPT2, LPT3, LPT4, LPT5, LPT6, LPT7, LPT8, LPT9
-    id: str = Field(default="Custom_Splunk_Content_Pack",title="Special name required for publishing your app on Splunkbase")
-    label: str = Field(default="custom_label",title="Another label for your content.")
+    id: str = Field(default="ContentPack",title="Internal name used by your app. No spaces or special characters.")
+    label: str = Field(default="Custom Splunk Content Pack",title="This is the app name that shows in the launcher.")
     author_name: str = Field(default="author name",title="Name of the Content Pack Author.")
     author_email: str = Field(default="author@contactemailaddress.com",title="Contact email for the Content Pack Author")
     author_company: str = Field(default="author company",title="Name of the company who has developed the Content Pack")
diff --git a/contentctl/output/conf_output.py b/contentctl/output/conf_output.py
index b5f1b509..2b3c26a0 100644
--- a/contentctl/output/conf_output.py
+++ b/contentctl/output/conf_output.py
@@ -155,8 +155,11 @@ def packageApp(self) -> None:
             from slim.utils import SlimLogger
             import logging
             #In order to avoid significant output, only emit FATAL log messages
-            SlimLogger.set_level(logging.FATAL)
-            slim.package(source=input_app_path, output_dir=pathlib.Path(self.config.build.path_root))
+            SlimLogger.set_level(logging.ERROR)
+            try:
+                slim.package(source=input_app_path, output_dir=pathlib.Path(self.config.build.path_root))
+            except SystemExit as e:
+                raise Exception(f"Error building package with slim: {str(e)}")
         else:
             with tarfile.open(output_app_expected_name, "w:gz") as app_archive:
                 app_archive.add(self.output_path, arcname=os.path.basename(self.output_path))
diff --git a/contentctl/output/templates/app.conf.j2 b/contentctl/output/templates/app.conf.j2
index 01f45eb1..ea7843c6 100644
--- a/contentctl/output/templates/app.conf.j2
+++ b/contentctl/output/templates/app.conf.j2
@@ -18,7 +18,7 @@ version = {{ conf.version }}
 description = {{ conf.description }}
 
 [package]
-id = {{ conf.name }}
+id = {{ conf.id }}
 
 [install]
 is_configured = false
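
Reviewer note (not part of the patch): the rewritten execute() boils down to "for each rest_api_deployments target, connect and push every stanza of the built conf files through the Splunk REST API." Below is a minimal standalone sketch of that flow for macros only, assuming the splunk-sdk (splunklib) client that api_deploy.py already uses and a built app under dist/ContentPack; the host and credential values are placeholders, not values shipped by this change.

# Minimal sketch of the per-target REST deploy flow (illustration only).
import pathlib
from configparser import RawConfigParser

import splunklib.client as client  # splunk-sdk, the same client api_deploy.py uses

# Placeholder target; real values come from rest_api_deployments in contentctl.yml.
target = {
    "host": "127.0.0.1",
    "port": 8089,
    "username": "admin",
    "password": "CHANGE_ME",
    "owner": "nobody",
    "app": "search",  # the patch currently deploys all content into the 'search' app
}
service = client.connect(**target)

# Read the macros generated by 'contentctl build' and create/update each stanza
# through the REST properties endpoint, mirroring the loop in execute().
parser = RawConfigParser(strict=False)
parser.read(pathlib.Path("dist/ContentPack/default/macros.conf"))

for stanza in parser.sections():
    try:
        service.post("properties/macros", __stanza=stanza)             # create the stanza
        service.post(f"properties/macros/{stanza}", **parser[stanza])  # set its keys
        print(f"Deployed macro [{stanza}]")
    except Exception as e:
        print(f"Error deploying macro {stanza}: {e}")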
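
Reviewer note (not part of the patch): the config.py change gives Deployments a single default ConfigDeployRestAPI() entry, which is what lets the new api_deploy action run without extra configuration. A hypothetical override is sketched below; description and server are fields shown in this diff, while port, username, and password are assumed to exist on ConfigDeployRestAPI because execute() reads them from each target. All values are placeholders.

from contentctl.objects.config import ConfigDeployRestAPI, Deployments

# One explicit REST target instead of the shipped default (placeholder values).
deployments = Deployments(
    rest_api_deployments=[
        ConfigDeployRestAPI(
            description="local dev instance",
            server="127.0.0.1",
            port=8089,
            username="admin",
            password="CHANGE_ME",
        )
    ]
)

for target in deployments.rest_api_deployments:
    print(f"{target.server}:{target.port} [{target.description}]")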