From d902590b037a1c43f616479bedf3de645b4ac848 Mon Sep 17 00:00:00 2001 From: Artem Rys <79191415+arys-splunk@users.noreply.github.com> Date: Wed, 7 Jul 2021 10:49:42 +0200 Subject: [PATCH] style: apply black formatter (#254) --- docs/conf.py | 17 +- splunk_add_on_ucc_framework/__init__.py | 338 ++++++++++-------- .../alert_utils/__init__.py | 2 +- .../alert_utils_common/__init__.py | 2 +- .../alert_utils_common/builder_constant.py | 35 +- .../alert_utils_common/conf_parser.py | 49 +-- .../alert_utils/alert_utils_common/logger.py | 52 +-- .../metric_collector/__init__.py | 2 +- .../metric_collector/event_writer.py | 44 +-- .../metric_collector/memory_event_writer.py | 10 +- .../metric_collector/metric_aggregator.py | 72 ++-- .../metric_collector/metric_exception.py | 1 + .../metric_collector/metric_util.py | 29 +- .../metric_collector/monitor.py | 65 ++-- .../number_metric_collector.py | 66 ++-- .../modular_alert_builder/__init__.py | 2 +- .../build_core/__init__.py | 37 +- .../build_core/alert_actions_base.py | 10 +- .../build_core/alert_actions_conf_gen.py | 186 ++++++---- .../build_core/alert_actions_exceptions.py | 1 + .../build_core/alert_actions_helper.py | 278 +++++++------- .../build_core/alert_actions_html_gen.py | 99 ++--- .../build_core/alert_actions_merge.py | 35 +- .../build_core/alert_actions_py_gen.py | 162 +++++---- .../build_core/alert_actions_template.py | 8 +- .../build_core/cim_actions.py | 71 ++-- splunk_add_on_ucc_framework/normalize.py | 38 +- .../start_alert_build.py | 53 ++- .../uccrestbuilder/__init__.py | 11 +- .../uccrestbuilder/builder.py | 46 +-- .../uccrestbuilder/endpoint/__init__.py | 2 +- .../uccrestbuilder/endpoint/datainput.py | 1 - .../uccrestbuilder/endpoint/field.py | 6 +- .../uccrestbuilder/endpoint/multiple_model.py | 1 - .../uccrestbuilder/endpoint/oauth_model.py | 1 - .../uccrestbuilder/endpoint/single_model.py | 1 - .../uccrestbuilder/global_config.py | 31 +- .../uccrestbuilder/rest_conf.py | 19 +- tests/__init__.py | 2 +- tests/conftest.py | 18 +- tests/data/test_ucc_generate.py | 4 +- 41 files changed, 1058 insertions(+), 849 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 789889bfe..d8da57c46 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -17,9 +17,9 @@ # -- Project information ----------------------------------------------------- -project = 'addonfactory-ucc-generator' -copyright = '2021, Splunk, Inc.' -author = 'Splunk, Inc.' +project = "addonfactory-ucc-generator" +copyright = "2021, Splunk, Inc." +author = "Splunk, Inc." # -- General configuration --------------------------------------------------- @@ -27,16 +27,15 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = [ -] +extensions = [] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # -- Options for HTML output ------------------------------------------------- @@ -44,9 +43,9 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. 
# -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] \ No newline at end of file +html_static_path = ["_static"] diff --git a/splunk_add_on_ucc_framework/__init__.py b/splunk_add_on_ucc_framework/__init__.py index fa4a1f3e1..fd797f8b9 100644 --- a/splunk_add_on_ucc_framework/__init__.py +++ b/splunk_add_on_ucc_framework/__init__.py @@ -49,13 +49,11 @@ sourcedir = os.path.dirname(os.path.realpath(__file__)) -j2_env = Environment( - loader=FileSystemLoader(os.path.join(sourcedir, "templates")) -) +j2_env = Environment(loader=FileSystemLoader(os.path.join(sourcedir, "templates"))) -logger = logging.getLogger('UCC') +logger = logging.getLogger("UCC") logger.setLevel(logging.INFO) -formatter = logging.Formatter('%(asctime)s [%(name)s] %(levelname)s: %(message)s') +formatter = logging.Formatter("%(asctime)s [%(name)s] %(levelname)s: %(message)s") shandler = logging.StreamHandler() shandler.setLevel(logging.INFO) shandler.setFormatter(formatter) @@ -82,6 +80,7 @@ def get_os_path(path): path = path.replace("/", os.sep) return path.strip(os.sep) + def recursive_overwrite(src, dest, ignore_list=None): """ Method to copy from src to dest recursively. @@ -122,6 +121,7 @@ def clean_before_build(outputdir): os.makedirs(os.path.join(outputdir)) logger.info("Cleaned out directory " + outputdir) + def version_tuple(version_str): """ convert string into tuple to compare version @@ -175,7 +175,7 @@ def handle_update(config_path): config_path : path to globalConfig.json Returns: - dictionary : schema_content (globalConfig.json) + dictionary : schema_content (globalConfig.json) """ with open(config_path) as config_file: schema_content = json.load(config_file) @@ -186,10 +186,10 @@ def handle_update(config_path): schema_content = handle_biased_terms_update(schema_content) with open(config_path, "w") as config_file: json.dump(schema_content, config_file, ensure_ascii=False, indent=4) - + # check for schemaVersion, if it's less than 0.0.2 then updating globalConfig.json if version_tuple(version) < version_tuple("0.0.2"): - ta_tabs = schema_content.get("pages").get("configuration",{}).get("tabs",{}) + ta_tabs = schema_content.get("pages").get("configuration", {}).get("tabs", {}) # check for schema changes in configuration page of globalConfig.json for tab in ta_tabs: @@ -198,39 +198,51 @@ def handle_update(config_path): oauth_state_enabled_entity = {} for entity in conf_entities: if entity.get("field") == "oauth_state_enabled": - logger.warn("oauth_state_enabled field is no longer a separate entity since UCC version 5.0.0. It is now an option in the oauth field. Please update the globalconfig.json file accordingly.") + logger.warn( + "oauth_state_enabled field is no longer a separate entity since UCC version 5.0.0. It is now an option in the oauth field. Please update the globalconfig.json file accordingly." 
+ ) oauth_state_enabled_entity = entity - if entity.get("field") == "oauth" and not entity.get("options",{}).get("oauth_state_enabled"): - entity["options"]["oauth_state_enabled"] = False - + if entity.get("field") == "oauth" and not entity.get( + "options", {} + ).get("oauth_state_enabled"): + entity["options"]["oauth_state_enabled"] = False + if oauth_state_enabled_entity: conf_entities.remove(oauth_state_enabled_entity) - + tab_options = tab.get("options", {}) if tab_options.get("onChange"): - logger.error("The onChange option is no longer supported since UCC version 5.0.0. You can use custom hooks to implement these actions.") + logger.error( + "The onChange option is no longer supported since UCC version 5.0.0. You can use custom hooks to implement these actions." + ) del tab_options["onChange"] if tab_options.get("onLoad"): - logger.error("The onLoad option is no longer supported since UCC version 5.0.0. You can use custom hooks to implement these actions.") + logger.error( + "The onLoad option is no longer supported since UCC version 5.0.0. You can use custom hooks to implement these actions." + ) del tab_options["onLoad"] - - is_inputs = ("inputs" in schema_content.get("pages")) + + is_inputs = "inputs" in schema_content.get("pages") if is_inputs: - services = schema_content.get("pages").get("inputs",{}).get("services",{}) + services = schema_content.get("pages").get("inputs", {}).get("services", {}) for service in services: service_options = service.get("options", {}) if service_options.get("onChange"): - logger.error("The onChange option is no longer supported since UCC version 5.0.0. You can use custom hooks to implement these actions.") + logger.error( + "The onChange option is no longer supported since UCC version 5.0.0. You can use custom hooks to implement these actions." + ) del service_options["onChange"] if service_options.get("onLoad"): - logger.error("The onLoad option is no longer supported since UCC version 5.0.0. You can use custom hooks to implement these actions.") + logger.error( + "The onLoad option is no longer supported since UCC version 5.0.0. You can use custom hooks to implement these actions." + ) del service_options["onLoad"] schema_content["meta"]["schemaVersion"] = "0.0.2" with open(config_path, "w") as config_file: - json.dump(schema_content,config_file, ensure_ascii=False, indent=4) - + json.dump(schema_content, config_file, ensure_ascii=False, indent=4) + return schema_content @@ -261,6 +273,7 @@ def replace_token(ta_name, outputdir): s = s.replace("${ta.name}", ta_name.lower()) f.write(s) + def install_libs(path, ucc_lib_target): """ Install 3rd Party libraries in addon. 
@@ -285,40 +298,51 @@ def _install_libs(requirements, ucc_target, installer="python3"): if not os.path.exists(ucc_target): os.makedirs(ucc_target) install_cmd = ( - installer +" -m pip install -r \"" + installer + + ' -m pip install -r "' + requirements - + "\" --no-compile --prefer-binary --ignore-installed --use-deprecated=legacy-resolver --target \"" + + '" --no-compile --prefer-binary --ignore-installed --use-deprecated=legacy-resolver --target "' + ucc_target - + "\"" + + '"' ) - os.system(installer +" -m pip install pip --upgrade") + os.system(installer + " -m pip install pip --upgrade") os.system(install_cmd) remove_files(ucc_target) + logging.info(f" Checking for requirements in {path}") - if os.path.exists(os.path.join(path,"lib", "requirements.txt")): - logging.info(f" Uses common requirements") - _install_libs(requirements=os.path.join(path, "lib","requirements.txt"), ucc_target=ucc_lib_target) - elif os.path.exists(os.path.join(os.path.abspath(os.path.join(path, os.pardir)), "requirements.txt")): - logging.info(f" Uses common requirements") - _install_libs(requirements=os.path.join(os.path.abspath(os.path.join(path, os.pardir)), "requirements.txt"), ucc_target=ucc_lib_target) + if os.path.exists(os.path.join(path, "lib", "requirements.txt")): + logging.info(f" Uses common requirements") + _install_libs( + requirements=os.path.join(path, "lib", "requirements.txt"), + ucc_target=ucc_lib_target, + ) + elif os.path.exists( + os.path.join(os.path.abspath(os.path.join(path, os.pardir)), "requirements.txt") + ): + logging.info(f" Uses common requirements") + _install_libs( + requirements=os.path.join( + os.path.abspath(os.path.join(path, os.pardir)), "requirements.txt" + ), + ucc_target=ucc_lib_target, + ) else: - logging.info(f" Not using common requirements") + logging.info(f" Not using common requirements") - - #Prevent certain packages from being included pip could be dangerous others are just wasted space - noshipdirs = ['setuptools', 'bin', 'pip', 'distribute', 'wheel'] + # Prevent certain packages from being included pip could be dangerous others are just wasted space + noshipdirs = ["setuptools", "bin", "pip", "distribute", "wheel"] p = Path(ucc_lib_target) for nsd in noshipdirs: try: - #Glob can return FileNotFoundError exception if no match - for o in p.glob(nsd + '*'): + # Glob can return FileNotFoundError exception if no match + for o in p.glob(nsd + "*"): if o.is_dir(): - logging.info(f" removing directory {o} from output must not ship") + logging.info(f" removing directory {o} from output must not ship") shutil.rmtree(o) except FileNotFoundError: pass - #Remove execute bit from any object in lib + # Remove execute bit from any object in lib NO_USER_EXEC = ~stat.S_IEXEC NO_GROUP_EXEC = ~stat.S_IXGRP NO_OTHER_EXEC = ~stat.S_IXOTH @@ -326,12 +350,11 @@ def _install_libs(requirements, ucc_target, installer="python3"): for o in p.rglob("*"): if not o.is_dir() and os.access(o, os.X_OK): - logging.info(f" fixing {o} execute bit") + logging.info(f" fixing {o} execute bit") current_permissions = stat.S_IMODE(os.lstat(o).st_mode) os.chmod(o, current_permissions & NO_EXEC) - def remove_files(path): """ Remove *.egg-info and *.dist-info files in given path. @@ -340,10 +363,13 @@ def remove_files(path): path (str): Path to remove *.egg-info and *.dist-info files. 
""" - rmdirs = glob.glob(os.path.join(path, "*.egg-info")) + glob.glob(os.path.join(path, "*.dist-info")) + rmdirs = glob.glob(os.path.join(path, "*.egg-info")) + glob.glob( + os.path.join(path, "*.dist-info") + ) for rmdir in rmdirs: shutil.rmtree(rmdir) + def generate_rest(ta_name, scheme, import_declare_name, outputdir): """ Build REST for Add-on. @@ -395,9 +421,7 @@ def replace_oauth_html_template_token(ta_name, ta_version, outputdir): outputdir (str): output directory. """ - html_template_path = os.path.join( - outputdir, ta_name, "appserver", "templates" - ) + html_template_path = os.path.join(outputdir, ta_name, "appserver", "templates") with open(os.path.join(html_template_path, "redirect.html")) as f: s = f.read() @@ -442,11 +466,21 @@ def modify_and_replace_token_for_oauth_templates( + ta_version + ".js" ) - redirect_html_dest = ( - os.path.join(outputdir, ta_name, "appserver", "templates", ta_name.lower() + "_redirect.html") + redirect_html_dest = os.path.join( + outputdir, + ta_name, + "appserver", + "templates", + ta_name.lower() + "_redirect.html", ) - redirect_xml_dest = ( - os.path.join(outputdir, ta_name, "default", "data", "ui", "views", ta_name.lower() + "_redirect.xml") + redirect_xml_dest = os.path.join( + outputdir, + ta_name, + "default", + "data", + "ui", + "views", + ta_name.lower() + "_redirect.xml", ) os.rename(redirect_js_src, redirect_js_dest) os.rename(redirect_html_src, redirect_html_dest) @@ -458,9 +492,8 @@ def modify_and_replace_token_for_oauth_templates( os.remove(redirect_html_src) os.remove(redirect_js_src) -def add_modular_input( - ta_name, schema_content, import_declare_name, outputdir -): + +def add_modular_input(ta_name, schema_content, import_declare_name, outputdir): """ Generate Modular input for addon. @@ -493,26 +526,22 @@ def add_modular_input( description=description, entity=entity, ) - input_file_name = os.path.join( - outputdir, ta_name, "bin", input_name + ".py" - ) + input_file_name = os.path.join(outputdir, ta_name, "bin", input_name + ".py") with open(input_file_name, "w") as input_file: input_file.write(content) - input_default = os.path.join( - outputdir, ta_name, "default", "inputs.conf" - ) + input_default = os.path.join(outputdir, ta_name, "default", "inputs.conf") config = configparser.ConfigParser() if os.path.exists(input_default): config.read(input_default) - + if config.has_section(input_name): - config[input_name]['python.version'] = 'python3' + config[input_name]["python.version"] = "python3" else: - config[input_name] = {'python.version': 'python3'} - - with open(input_default, 'w') as configfile: - config.write(configfile) + config[input_name] = {"python.version": "python3"} + + with open(input_default, "w") as configfile: + config.write(configfile) def make_modular_alerts(ta_name, ta_namespace, schema_content, outputdir): @@ -535,6 +564,7 @@ def make_modular_alerts(ta_name, ta_namespace, schema_content, outputdir): sourcedir, ) + def get_ignore_list(ta_name, path): """ Return path of files/folders to be removed. @@ -551,9 +581,13 @@ def get_ignore_list(ta_name, path): else: with open(path) as ignore_file: ignore_list = ignore_file.readlines() - ignore_list = [(os.path.join("output", ta_name, get_os_path(path))).strip() for path in ignore_list] + ignore_list = [ + (os.path.join("output", ta_name, get_os_path(path))).strip() + for path in ignore_list + ] return ignore_list + def remove_listed_files(ignore_list): """ Return path of files/folders to removed in output folder. 
@@ -569,7 +603,12 @@ def remove_listed_files(ignore_list): elif os.path.isdir(path): shutil.rmtree(path, ignore_errors=True) else: - logger.info("While ignoring the files mentioned in .uccignore {} was not found".format(path)) + logger.info( + "While ignoring the files mentioned in .uccignore {} was not found".format( + path + ) + ) + def update_ta_version(config, ta_version): """ @@ -585,6 +624,7 @@ def update_ta_version(config, ta_version): with open(config, "w") as config_file: json.dump(schema_content, config_file, indent=4) + def handle_no_inputs(ta_name, outputdir): """ Handle for configuration without input page. @@ -593,6 +633,7 @@ def handle_no_inputs(ta_name, outputdir): ta_name (str): Name of TA. outputdir (str): output directory. """ + def _removeinput(path): """ Remove "inputs" view from default.xml @@ -604,56 +645,64 @@ def _removeinput(path): root = tree.getroot() for element in root: - if element.tag =="view" and element.get('name') == "inputs": + if element.tag == "view" and element.get("name") == "inputs": root.remove(element) tree.write(path) default_xml_file = os.path.join( - outputdir, ta_name, "default", "data", "ui", "nav","default.xml" + outputdir, ta_name, "default", "data", "ui", "nav", "default.xml" ) # Remove "inputs" view from default.xml _removeinput(default_xml_file) file_remove_list = [] - file_remove_list.append(os.path.join( - outputdir, ta_name, "default", "data", "ui", "views","inputs.xml" - )) - file_remove_list.append(os.path.join(outputdir,ta_name,"appserver","static","css","inputs.css")) - file_remove_list.append(os.path.join(outputdir,ta_name,"appserver","static","css","createInput.css")) + file_remove_list.append( + os.path.join(outputdir, ta_name, "default", "data", "ui", "views", "inputs.xml") + ) + file_remove_list.append( + os.path.join(outputdir, ta_name, "appserver", "static", "css", "inputs.css") + ) + file_remove_list.append( + os.path.join( + outputdir, ta_name, "appserver", "static", "css", "createInput.css" + ) + ) # Remove unnecessary files for fl in file_remove_list: try: os.remove(fl) except OSError: pass - + + def save_comments(outputdir, ta_name): """ Save index and content of comments in conf file and return dictionary thereof """ - config_file = os.path.join(outputdir, ta_name,'default', "app.conf") + config_file = os.path.join(outputdir, ta_name, "default", "app.conf") comment_map = {} with open(config_file) as file: i = 0 lines = file.readlines() for line in lines: - if re.match( r'^\s*#.*?$', line): + if re.match(r"^\s*#.*?$", line): comment_map[i] = line i += 1 return comment_map + def restore_comments(outputdir, ta_name, comment_map): """ Write comments to conf file at their original indices """ - config_file = os.path.join(outputdir, ta_name,'default', "app.conf") + config_file = os.path.join(outputdir, ta_name, "default", "app.conf") with open(config_file) as file: lines = file.readlines() for (index, comment) in sorted(comment_map.items()): lines.insert(index, comment) - with open(config_file, 'w') as file: - file.write(''.join(lines)) + with open(config_file, "w") as file: + file.write("".join(lines)) def validate_config_against_schema(config: dict): @@ -674,7 +723,7 @@ def _generate(source, config, ta_version, outputdir=None): if not ta_version: version = Version.from_git() if not version.stage: - stage = 'R' + stage = "R" else: stage = version.stage[:1] @@ -686,13 +735,11 @@ def _generate(source, config, ta_version, outputdir=None): version_str = ta_version if not os.path.exists(source): - raise NotADirectoryError( - 
"{} not Found.".format(os.path.abspath(source))) + raise NotADirectoryError("{} not Found.".format(os.path.abspath(source))) # Setting default value to Config argument if not config: - config = os.path.abspath( - os.path.join(source, PARENT_DIR, "globalConfig.json")) + config = os.path.abspath(os.path.join(source, PARENT_DIR, "globalConfig.json")) clean_before_build(outputdir) @@ -708,7 +755,8 @@ def _generate(source, config, ta_version, outputdir=None): logger.error( f"Manifest file @ {app_manifest_path} has invalid format.\n" f"Please refer to {APP_MANIFEST_WEBSITE}.\n" - f"Lines with comments are supported if they start with \"#\".\n") + f'Lines with comments are supported if they start with "#".\n' + ) sys.exit(1) ta_name = manifest.get_addon_name() @@ -734,7 +782,7 @@ def _generate(source, config, ta_version, outputdir=None): ta_tabs = schema_content.get("pages").get("configuration").get("tabs") ta_namespace = schema_content.get("meta").get("restRoot") import_declare_name = "import_declare_test" - is_inputs = ("inputs" in schema_content.get("pages")) + is_inputs = "inputs" in schema_content.get("pages") logger.info("Package ID is " + ta_name) @@ -746,28 +794,29 @@ def _generate(source, config, ta_version, outputdir=None): logger.info("Copy globalConfig to output") shutil.copyfile( config, - os.path.join(outputdir, ta_name, "appserver", "static", "js", - "build", "globalConfig.json"), + os.path.join( + outputdir, + ta_name, + "appserver", + "static", + "js", + "build", + "globalConfig.json", + ), ) ucc_lib_target = os.path.join(outputdir, ta_name, "lib") - logger.info( - f"Install Addon Requirements into {ucc_lib_target} from {source}") - install_libs( - source, - ucc_lib_target - ) + logger.info(f"Install Addon Requirements into {ucc_lib_target} from {source}") + install_libs(source, ucc_lib_target) replace_token(ta_name, outputdir) generate_rest(ta_name, scheme, import_declare_name, outputdir) modify_and_replace_token_for_oauth_templates( - ta_name, ta_tabs, schema_content.get('meta').get('version'), outputdir + ta_name, ta_tabs, schema_content.get("meta").get("version"), outputdir ) if is_inputs: - add_modular_input( - ta_name, schema_content, import_declare_name, outputdir - ) + add_modular_input(ta_name, schema_content, import_declare_name, outputdir) else: handle_no_inputs(ta_name, outputdir) @@ -776,26 +825,26 @@ def _generate(source, config, ta_version, outputdir=None): else: logger.info("Addon Version : " + ta_version) logger.warning( - "Skipped installing UCC required python modules as GlobalConfig.json does not exist.") + "Skipped installing UCC required python modules as GlobalConfig.json does not exist." + ) logger.warning( - "Skipped Generating UI components as GlobalConfig.json does not exist.") + "Skipped Generating UI components as GlobalConfig.json does not exist." 
+ ) logger.info("Setting TA name as generic") ucc_lib_target = os.path.join(outputdir, ta_name, "lib") - install_libs( - source, - ucc_lib_target=ucc_lib_target - ) + install_libs(source, ucc_lib_target=ucc_lib_target) - ignore_list = get_ignore_list(ta_name, os.path.abspath( - os.path.join(source, PARENT_DIR, ".uccignore"))) + ignore_list = get_ignore_list( + ta_name, os.path.abspath(os.path.join(source, PARENT_DIR, ".uccignore")) + ) remove_listed_files(ignore_list) logger.info("Copy package directory ") recursive_overwrite(source, os.path.join(outputdir, ta_name)) # Update app.manifest - with open(os.path.join(outputdir, ta_name, 'VERSION'), 'w') as version_file: + with open(os.path.join(outputdir, ta_name, "VERSION"), "w") as version_file: version_file.write(version_str) version_file.write("\n") version_file.write(ta_version) @@ -809,49 +858,48 @@ def _generate(source, config, ta_version, outputdir=None): comment_map = save_comments(outputdir, ta_name) app_config = configparser.ConfigParser() - app_config.read_file( - open(os.path.join(outputdir, ta_name, 'default', "app.conf"))) - if not 'launcher' in app_config: - app_config.add_section('launcher') - if not 'id' in app_config: - app_config.add_section('id') - if not 'install' in app_config: - app_config.add_section('install') - if not 'package' in app_config: - app_config.add_section('package') - if not 'ui' in app_config: - app_config.add_section('ui') - - app_config['launcher']['version'] = ta_version - app_config['launcher']['description'] = manifest.get_description() - - app_config['id']['version'] = ta_version - - app_config['install']['build'] = str(int(time.time())) - app_config['package']['id'] = ta_name - - app_config['ui']['label'] = manifest.get_title() - - with open(os.path.join(outputdir, ta_name, 'default', "app.conf"), - 'w') as configfile: + app_config.read_file(open(os.path.join(outputdir, ta_name, "default", "app.conf"))) + if not "launcher" in app_config: + app_config.add_section("launcher") + if not "id" in app_config: + app_config.add_section("id") + if not "install" in app_config: + app_config.add_section("install") + if not "package" in app_config: + app_config.add_section("package") + if not "ui" in app_config: + app_config.add_section("ui") + + app_config["launcher"]["version"] = ta_version + app_config["launcher"]["description"] = manifest.get_description() + + app_config["id"]["version"] = ta_version + + app_config["install"]["build"] = str(int(time.time())) + app_config["package"]["id"] = ta_name + + app_config["ui"]["label"] = manifest.get_title() + + with open( + os.path.join(outputdir, ta_name, "default", "app.conf"), "w" + ) as configfile: app_config.write(configfile) # restore License header restore_comments(outputdir, ta_name, comment_map) # Copy Licenses - license_dir = os.path.abspath( - os.path.join(source, PARENT_DIR, "LICENSES")) + license_dir = os.path.abspath(os.path.join(source, PARENT_DIR, "LICENSES")) if os.path.exists(license_dir): logger.info("Copy LICENSES directory ") - recursive_overwrite(license_dir, - os.path.join(outputdir, ta_name, "LICENSES")) + recursive_overwrite(license_dir, os.path.join(outputdir, ta_name, "LICENSES")) - if os.path.exists(os.path.abspath( - os.path.join(source, PARENT_DIR, "additional_packaging.py"))): - sys.path.insert(0, - os.path.abspath(os.path.join(source, PARENT_DIR))) + if os.path.exists( + os.path.abspath(os.path.join(source, PARENT_DIR, "additional_packaging.py")) + ): + sys.path.insert(0, os.path.abspath(os.path.join(source, PARENT_DIR))) from 
additional_packaging import additional_packaging + additional_packaging(ta_name) @@ -864,22 +912,22 @@ def main(): parser.add_argument( "--source", type=str, - nargs='?', + nargs="?", help="Folder containing the app.manifest and app source", default="package", ) parser.add_argument( "--config", type=str, - nargs='?', + nargs="?", help="Path to configuration file, defaults to GlobalConfig.json in parent directory of source provided", - default=None + default=None, ) parser.add_argument( "--ta-version", type=str, help="Version of TA, default version is version specified in the package such as app.manifest, app.conf, and globalConfig.json", - default=None + default=None, ) args = parser.parse_args() _generate(args.source, args.config, args.ta_version) diff --git a/splunk_add_on_ucc_framework/alert_utils/__init__.py b/splunk_add_on_ucc_framework/alert_utils/__init__.py index b9ea4650a..46020c32c 100644 --- a/splunk_add_on_ucc_framework/alert_utils/__init__.py +++ b/splunk_add_on_ucc_framework/alert_utils/__init__.py @@ -14,4 +14,4 @@ # limitations under the License. # # -# SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +# SPDX-License-Identifier: Apache-2.0 diff --git a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/__init__.py b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/__init__.py index b9ea4650a..46020c32c 100644 --- a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/__init__.py +++ b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/__init__.py @@ -14,4 +14,4 @@ # limitations under the License. # # -# SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +# SPDX-License-Identifier: Apache-2.0 diff --git a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/builder_constant.py b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/builder_constant.py index ea4175be6..dcb0caa41 100644 --- a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/builder_constant.py +++ b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/builder_constant.py @@ -22,13 +22,14 @@ # can not use common_util.make_splunk_path here. 
cylic import BUILDER_WORKSPACE_ROOT = make_splunkhome_path( - ['etc', 'apps', ADDON_BUILDER_APP_NAME, 'local', 'builder_workspace']) + ["etc", "apps", ADDON_BUILDER_APP_NAME, "local", "builder_workspace"] +) # cookie related keys -TA_NAME = 'ta_builder_current_ta_name' -TA_DISPLAY_NAME = 'ta_builder_current_ta_display_name' +TA_NAME = "ta_builder_current_ta_name" +TA_DISPLAY_NAME = "ta_builder_current_ta_display_name" TA_WIZARD_STEP = "ta_builder_wizard" -BUILT_FLAG = 'built_by_tabuilder' +BUILT_FLAG = "built_by_tabuilder" COOKIE_KEYS = [TA_NAME, TA_WIZARD_STEP, BUILT_FLAG, TA_DISPLAY_NAME] COOKIE_EXPIRES_DAY = 30 @@ -43,22 +44,18 @@ APP_CERT_STANZA = "app_cert" # global settings -USR_CREDENTIAL_SETTING = 'credential_settings' -PROXY_SETTING = 'proxy_settings' -LOG_SETTINGS = 'log_settings' -CUSTOMIZED_SETTINGS = 'customized_settings' -CREDENTIAL_SCHEMA = 'default_account' -LOG_SCHEMA = 'default_logging' -PROXY_SCHEMA = 'default_proxy' -CUSTOMIZED_BOOL_SCHEMA = 'bool' -CUSTOMIZED_TEXT_SCHEMA = 'text' -CUSTOMIZED_PASSWORD_SCHEMA = 'password' +USR_CREDENTIAL_SETTING = "credential_settings" +PROXY_SETTING = "proxy_settings" +LOG_SETTINGS = "log_settings" +CUSTOMIZED_SETTINGS = "customized_settings" +CREDENTIAL_SCHEMA = "default_account" +LOG_SCHEMA = "default_logging" +PROXY_SCHEMA = "default_proxy" +CUSTOMIZED_BOOL_SCHEMA = "bool" +CUSTOMIZED_TEXT_SCHEMA = "text" +CUSTOMIZED_PASSWORD_SCHEMA = "password" -CUSTOMIZED_TYPE_MAP = { - "text": "text", - "checkbox": "bool", - "password": "password" -} +CUSTOMIZED_TYPE_MAP = {"text": "text", "checkbox": "bool", "password": "password"} # cim mapping settings CONF_MODELS = "aob_models" diff --git a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/conf_parser.py b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/conf_parser.py index 67d403996..220aaa144 100644 --- a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/conf_parser.py +++ b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/conf_parser.py @@ -40,18 +40,18 @@ COMMENT_PREFIX = ";#*" COMMENT_KEY = "__COMMENTS__" -class TABConfigParser(configparser.RawConfigParser): +class TABConfigParser(configparser.RawConfigParser): def _read(self, fp, fpname): """ Override the built-in _read() method to read comments """ from configparser import DEFAULTSECT, ParsingError - cursect = None # None, or a dictionary + cursect = None # None, or a dictionary optname = None lineno = 0 - e = None # None, or an exception + e = None # None, or an exception ######## tab_update ######## comment_index = 0 @@ -67,10 +67,10 @@ def _read(self, fp, fpname): lineno = lineno + 1 line = line.strip(" ") # comment or blank line? - if line.strip() == '' or line[0] in COMMENT_PREFIX: + if line.strip() == "" or line[0] in COMMENT_PREFIX: - ######## tab_update ######## - # save the lineno & comments + ######## tab_update ######## + # save the lineno & comments if cursect: name = "{}{}".format(COMMENT_KEY, comment_index) comment_index += 1 @@ -80,7 +80,7 @@ def _read(self, fp, fpname): continue ############################ - if line.split(None, 1)[0].lower() == 'rem' and line[0] in "rR": + if line.split(None, 1)[0].lower() == "rem" and line[0] in "rR": # no leading whitespace continue # continuation line? @@ -106,14 +106,14 @@ def _read(self, fp, fpname): # is it a section header? 
mo = self.SECTCRE.match(line) if mo: - sectname = mo.group('header') + sectname = mo.group("header") if sectname in self._sections: cursect = self._sections[sectname] elif sectname == DEFAULTSECT: cursect = self._defaults else: cursect = self._dict() - cursect['__name__'] = sectname + cursect["__name__"] = sectname self._sections[sectname] = cursect # So sections can't start with a continuation line optname = None @@ -122,28 +122,28 @@ def _read(self, fp, fpname): elif cursect is None: ######## tab_update ######## # disable the exception since splunk allows the field outside stanzas -# raise MissingSectionHeaderError(fpname, lineno, line) + # raise MissingSectionHeaderError(fpname, lineno, line) self.fields_outside_stanza.append(line) ############################ # an option line? else: mo = self._optcre.match(line) if mo: - optname, vi, optval = mo.group('option', 'vi', 'value') + optname, vi, optval = mo.group("option", "vi", "value") optname = self.optionxform(optname.rstrip()) # This check is fine because the OPTCRE cannot # match if it would set optval to None if optval is not None: - if vi in ('=', ':') and ';' in optval: + if vi in ("=", ":") and ";" in optval: # ';' is a comment delimiter only if it follows # a spacing character - pos = optval.find(';') - if pos != -1 and optval[pos-1].isspace(): + pos = optval.find(";") + if pos != -1 and optval[pos - 1].isspace(): optval = optval[:pos] optval = optval.strip() # allow empty values if optval == '""': - optval = '' + optval = "" cursect[optname] = [optval] else: # valueless option handling @@ -166,8 +166,7 @@ def _read(self, fp, fpname): for options in all_sections: for name, val in list(options.items()): if isinstance(val, list): - options[name] = '\n'.join(val) - + options[name] = "\n".join(val) def write(self, fp): """ @@ -188,7 +187,7 @@ def write(self, fp): if self._defaults: fp.write("[%s]\n" % DEFAULTSECT) for (key, value) in list(self._defaults.items()): - fp.write("{} = {}\n".format(key, str(value).replace('\n', '\n\t'))) + fp.write("{} = {}\n".format(key, str(value).replace("\n", "\n\t"))) fp.write("\n") for section in self._sections: fp.write("[%s]\n" % section) @@ -206,7 +205,7 @@ def write(self, fp): ############################ if (value is not None) or (self._optcre == self.OPTCRE): - key = " = ".join((key, str(value).replace('\n', '\n\t'))) + key = " = ".join((key, str(value).replace("\n", "\n\t"))) fp.write("%s\n" % (key)) ######## tab_update ######## # write the seperator line for stanza @@ -223,10 +222,10 @@ def items(self, section): items = configparser.RawConfigParser.items(self, section) res = [] - for k,v in items: + for k, v in items: if k.startswith(COMMENT_KEY): continue - res.append((k,v)) + res.append((k, v)) return res def options(self, section): @@ -245,8 +244,12 @@ def item_dict(self): sections = dict(self._sections) for section, key_values in list(sections.items()): kv = {} - for k,v in list(key_values.items()): - if not isinstance(k, str) or k.startswith(COMMENT_KEY) or k == "__name__": + for k, v in list(key_values.items()): + if ( + not isinstance(k, str) + or k.startswith(COMMENT_KEY) + or k == "__name__" + ): continue kv[k] = v if kv: diff --git a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/logger.py b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/logger.py index a6a0b10fa..f640f4ecd 100644 --- a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/logger.py +++ b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/logger.py @@ -40,7 +40,7 @@ 
"global_settings_builder": "ta_builder", "modular_alert": "ta_builder", "modular_alert_testing": "ta_builder", - "package_add_on": "package_add_on" + "package_add_on": "package_add_on", } g_log_levels = {} @@ -52,78 +52,83 @@ # set the context of the log log.Logs.set_context(namespace=builder_constant.ADDON_BUILDER_APP_NAME) -@metric_util.function_run_time(tags=['tab_logger']) + +@metric_util.function_run_time(tags=["tab_logger"]) def get_package_add_on_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("package_add_on"), level) -@metric_util.function_run_time(tags=['tab_logger']) + +@metric_util.function_run_time(tags=["tab_logger"]) def get_field_extraction_builder_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("field_extraction_builder"), level) -@metric_util.function_run_time(tags=['tab_logger']) +@metric_util.function_run_time(tags=["tab_logger"]) def get_builder_util_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("utility"), level) -@metric_util.function_run_time(tags=['tab_logger']) +@metric_util.function_run_time(tags=["tab_logger"]) def get_meta_manager_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("meta_manager"), level) -@metric_util.function_run_time(tags=['tab_logger']) +@metric_util.function_run_time(tags=["tab_logger"]) def get_validation_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("validation"), level) -@metric_util.function_run_time(tags=['tab_logger']) +@metric_util.function_run_time(tags=["tab_logger"]) def get_field_extraction_validator_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("field_extraction_validator"), level) -@metric_util.function_run_time(tags=['tab_logger']) +@metric_util.function_run_time(tags=["tab_logger"]) def get_app_cert_validator_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("app_cert_validator"), level) -@metric_util.function_run_time(tags=['tab_logger']) +@metric_util.function_run_time(tags=["tab_logger"]) def get_sourcetype_builder_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("sourcetype"), level) -@metric_util.function_run_time(tags=['tab_logger']) +@metric_util.function_run_time(tags=["tab_logger"]) def get_app_instance_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("app"), level) -@metric_util.function_run_time(tags=['tab_logger']) +@metric_util.function_run_time(tags=["tab_logger"]) def get_generator_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("generator"), level) -@metric_util.function_run_time(tags=['tab_logger']) +@metric_util.function_run_time(tags=["tab_logger"]) def get_cim_builder_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("cim_builder"), level) -@metric_util.function_run_time(tags=['tab_logger']) +@metric_util.function_run_time(tags=["tab_logger"]) def get_input_builder_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("input_builder"), level) -@metric_util.function_run_time(tags=['tab_logger']) +@metric_util.function_run_time(tags=["tab_logger"]) def get_setup_builder_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("setup_builder"), level) -@metric_util.function_run_time(tags=['tab_logger']) + +@metric_util.function_run_time(tags=["tab_logger"]) def get_modular_alert_builder_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("modular_alert"), level) -@metric_util.function_run_time(tags=['tab_logger']) + +@metric_util.function_run_time(tags=["tab_logger"]) def get_modular_alert_testing_logger(level=DEFAULT_LOG_LEVEL): return 
_get_logger(LOGS.get("modular_alert_testing"), level) -@metric_util.function_run_time(tags=['tab_logger']) + +@metric_util.function_run_time(tags=["tab_logger"]) def set_log_level(level, name=None): if name: logger = _get_logger(name) @@ -134,7 +139,7 @@ def set_log_level(level, name=None): logger.setLevel(level) -@metric_util.function_run_time(tags=['tab_logger']) +@metric_util.function_run_time(tags=["tab_logger"]) def _get_logger(name, level=logging.INFO): with g_logger_lock: l = None @@ -151,23 +156,26 @@ def _get_logger(name, level=logging.INFO): return l -@metric_util.function_run_time(tags=['tab_logger']) +@metric_util.function_run_time(tags=["tab_logger"]) def get_alert_builder_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("alert_builder"), level) -@metric_util.function_run_time(tags=['tab_logger']) +@metric_util.function_run_time(tags=["tab_logger"]) def get_global_settings_builder_logger(level=DEFAULT_LOG_LEVEL): return _get_logger(LOGS.get("global_settings_builder"), level) -HIDDEN_FIELDS = ['password', 'credential', 'value'] + +HIDDEN_FIELDS = ["password", "credential", "value"] + + def hide_sensitive_field(raw): if isinstance(raw, dict): cloned = dict(raw) for k in list(raw.keys()): lower_key = k.lower() if any([i in lower_key for i in HIDDEN_FIELDS]): - cloned[k] = '*******' + cloned[k] = "*******" else: cloned[k] = hide_sensitive_field(cloned[k]) return cloned diff --git a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/__init__.py b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/__init__.py index b9ea4650a..46020c32c 100644 --- a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/__init__.py +++ b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/__init__.py @@ -14,4 +14,4 @@ # limitations under the License. # # -# SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +# SPDX-License-Identifier: Apache-2.0 diff --git a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/event_writer.py b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/event_writer.py index 23af54980..cf781a7f8 100644 --- a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/event_writer.py +++ b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/event_writer.py @@ -25,30 +25,30 @@ def message(app, current_time, event, tags): final_event = dict(event) - final_event['mcollector_event_ts'] = current_time - final_event['mcollector_target_app'] = app + final_event["mcollector_event_ts"] = current_time + final_event["mcollector_target_app"] = app if tags: - final_event['mcollector_tags'] = tags + final_event["mcollector_tags"] = tags return json.dumps(final_event) class MetricEventWriter: def __init__(self, app, config): - ''' + """ config is a dict, which contains all the params for file writer - tag_black_list: a list of tags, if the tags are in the black list, do not write the event - tag_white_list: a list of tags, only write the event that has these tags. white_list has higher priority than black_list - ''' + """ assert app is not None assert isinstance(config, dict) self._app = app - self._deny_list_tags = config.get('tag_black_list', []) - self._allow_list_tags = config.get('tag_white_list', []) + self._deny_list_tags = config.get("tag_black_list", []) + self._allow_list_tags = config.get("tag_white_list", []) def write_event(self, ev, tags=[]): - ''' + """ ev. 
event dict, it might be a hierarchical structure - ''' + """ assert isinstance(ev, dict) assert isinstance(tags, list) if self._allow_list_tags: @@ -71,34 +71,34 @@ def _flush_event(self, ev, tags): self._flush_msg(message(self._app, ctime, ev, tags)) def _flush_msg(self, msg): - ''' + """ this should be implemented - ''' - raise NotImplemented('_flush_msg should be implemented.') + """ + raise NotImplemented("_flush_msg should be implemented.") def update_config(self, config): - self._deny_list_tags = config.get('tag_black_list', []) - self._allow_list_tags = config.get('tag_white_list', []) + self._deny_list_tags = config.get("tag_black_list", []) + self._allow_list_tags = config.get("tag_white_list", []) class FileEventWriter(MetricEventWriter): def __init__(self, app, config): super().__init__(app, config) - self._logger_name = config.get('logger', 'default') + '_metric_events' + self._logger_name = config.get("logger", "default") + "_metric_events" self._logger = log.Logs().get_logger(self._logger_name) - self._log_level = config.get('loglevel', 'INFO') + self._log_level = config.get("loglevel", "INFO") log.Logs().set_level(self._log_level, self._logger_name) # reset the formatter of log handler for handler in self._logger.handlers: - handler.setFormatter(logging.Formatter('%(message)s')) + handler.setFormatter(logging.Formatter("%(message)s")) def update_config(self, config): super().update_config(config) - l_name = config.get('logger', 'default') + '_metric_events' + l_name = config.get("logger", "default") + "_metric_events" if l_name != self._logger_name: self._logger_name = l_name self._logger = log.Logs().get_logger(self._logger_name) - l_level = config.get('loglevel', 'INFO') + l_level = config.get("loglevel", "INFO") if self._log_level != l_level: self._log_level = l_level log.Logs().set_level(self._log_level, self._logger_name) @@ -106,10 +106,12 @@ def update_config(self, config): def _flush_msg(self, msg): self._logger.info(msg) + class SplunkStashFileWriter(MetricEventWriter): - ''' + """ write a small file and use splunk rest to upload this file - ''' + """ + def __init__(self, app, config): super().__init__(app, config) pass diff --git a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/memory_event_writer.py b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/memory_event_writer.py index 3f13f9601..7da512be2 100644 --- a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/memory_event_writer.py +++ b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/memory_event_writer.py @@ -23,14 +23,14 @@ class MemoryEventWriter(MetricEventWriter): - ''' - This class is used to mock an in-memory event sink. - Be careful: all the events are stored in the memory queue - ''' + """ + This class is used to mock an in-memory event sink. 
+ Be careful: all the events are stored in the memory queue + """ def __init__(self, app, config): super().__init__(app, config) - max_queue_size = config.get('max_queue_size', 0) + max_queue_size = config.get("max_queue_size", 0) self.q = queue.Queue(maxsize=max_queue_size) def _flush_msg(self, msg): diff --git a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/metric_aggregator.py b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/metric_aggregator.py index 7a18ca534..88a70bc82 100644 --- a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/metric_aggregator.py +++ b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/metric_aggregator.py @@ -17,19 +17,19 @@ # encode=utf-8 - class NumberMetricArregator: - ''' + """ aggregate the metric with the second interval. for minute and hour aggregates, use splunk to do it. - ''' + """ + def __init__(self, metric_name, event_writer, metric_tags=None, max_time_span=10): - ''' + """ @params: metric_name - a literal string for the metric @params: event_writer - a MetricEventWriter object which flushes the aggregated metric events @params: max_time_span - the aggregator creates max_time_span buckets. Assume the metric event delay will not be larger than max_time_span seconds. - ''' + """ self.m_name = metric_name self.m_span = max_time_span self.event_writer = event_writer @@ -38,23 +38,23 @@ def __init__(self, metric_name, event_writer, metric_tags=None, max_time_span=10 self.buckets = dict() def aggregate_metric(self, record): - ''' + """ this method is invoked in the worker thread. - ''' + """ if self.oldest_metric_time is None: - self.oldest_metric_time = record['ts'] + self.oldest_metric_time = record["ts"] return self._aggregate_metric(record) def _aggregate_metric(self, record): - raise NotImplemented('Children class must implement _aggregate_metric') + raise NotImplemented("Children class must implement _aggregate_metric") def _format_metric(self, metric): - raise NotImplemented('Children class must implement _format_metric') + raise NotImplemented("Children class must implement _format_metric") def get_formatted_metric(self, ts, metric): formatted_metric = self._format_metric(metric) - formatted_metric['time_slot'] = ts - formatted_metric['metric_name'] = self.m_name + formatted_metric["time_slot"] = ts + formatted_metric["metric_name"] = self.m_name return formatted_metric def _flush_buckets(self, timestamp): @@ -65,52 +65,68 @@ def _flush_buckets(self, timestamp): for ts in range(self.oldest_metric_time, low_water_mark): metric = self.buckets.get(ts, None) if metric: - self.event_writer.write_event(self.get_formatted_metric(ts, metric), self.metric_tags) + self.event_writer.write_event( + self.get_formatted_metric(ts, metric), self.metric_tags + ) del self.buckets[ts] self.oldest_metric_time = low_water_mark def flush_all_event(self): for ts, metric in list(self.buckets.items()): - self.event_writer.write_event(self.get_formatted_metric(ts, metric), self.metric_tags) + self.event_writer.write_event( + self.get_formatted_metric(ts, metric), self.metric_tags + ) class NumberMetricSum(NumberMetricArregator): - ''' + """ add all the numbers in the same time slot - ''' + """ + def __init__(self, metric_name, event_writer, metric_tags=None, max_time_span=10): super().__init__(metric_name, event_writer, metric_tags, max_time_span) def _aggregate_metric(self, record): - ts = record['ts'] - value = record['v'] + ts = record["ts"] + value = record["v"] 
bucket_value = self.buckets.get(ts, None) if bucket_value is None: - self.buckets[ts] = {'sum': value} + self.buckets[ts] = {"sum": value} else: - bucket_value['sum'] = bucket_value['sum'] + value + bucket_value["sum"] = bucket_value["sum"] + value self.buckets[ts] = bucket_value self._flush_buckets(ts) def _format_metric(self, metric): - return {'sum': metric['sum']} + return {"sum": metric["sum"]} + class NumberMetricAverage(NumberMetricArregator): - ''' + """ calculate the average number for the metric - ''' + """ + def __init__(self, metric_name, event_writer, metric_tags=None, max_time_span=10): super().__init__(metric_name, event_writer, metric_tags, max_time_span) def _format_metric(self, metric): - return {'avg': float(metric['sum'])/metric['count'], 'max': metric['max'], 'min': metric['min']} + return { + "avg": float(metric["sum"]) / metric["count"], + "max": metric["max"], + "min": metric["min"], + } def _aggregate_metric(self, record): - ts = record['ts'] - value = record['v'] + ts = record["ts"] + value = record["v"] bucket_value = self.buckets.get(ts, None) if bucket_value is None: - self.buckets[ts] = {'sum': value, 'count': 1, 'max': value, 'min': value} + self.buckets[ts] = {"sum": value, "count": 1, "max": value, "min": value} else: - self.buckets[ts] = {'sum': bucket_value['sum'] + value, 'count': bucket_value['count'] + 1, 'max': value if value > bucket_value['max'] else bucket_value['max'], 'min': value if value < bucket_value['min'] else bucket_value['min']} + self.buckets[ts] = { + "sum": bucket_value["sum"] + value, + "count": bucket_value["count"] + 1, + "max": value if value > bucket_value["max"] else bucket_value["max"], + "min": value if value < bucket_value["min"] else bucket_value["min"], + } self._flush_buckets(ts) diff --git a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/metric_exception.py b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/metric_exception.py index 462615474..4f6629d47 100644 --- a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/metric_exception.py +++ b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/metric_exception.py @@ -16,6 +16,7 @@ # encode=utf-8 + class MetricException(Exception): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) diff --git a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/metric_util.py b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/metric_util.py index 30a68ac15..8a6f782d2 100644 --- a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/metric_util.py +++ b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/metric_util.py @@ -25,18 +25,18 @@ from . import monitor -__all__ = ['initialize_metric_collector', 'function_run_time'] +__all__ = ["initialize_metric_collector", "function_run_time"] def initialize_metric_collector(config, update_config=False): - ''' + """ config is a configuration dict. - app: required field, define the app name for the monitor - event_writer: event writer type, must be one of ['file', 'hec'] - writer_config: a dict, define the configuration for the event writer. different event writer has different configurations. 
update_config: force an update of the configuration - ''' + """ m = monitor.Monitor().configure(config, force_update=update_config) m.start() @@ -45,14 +45,14 @@ def write_event(ev, tags=[]): monitor.Monitor().write_event(ev, tags) -CREDENTIAL_KEYS = ['password', 'passwords', 'token'] +CREDENTIAL_KEYS = ["password", "passwords", "token"] def mask_credentials(data): - ''' + """ The argument will be cloned - ''' - masked_str = '******' + """ + masked_str = "******" if isinstance(data, dict): new_data = {} for k in list(data.keys()): @@ -85,7 +85,7 @@ if sensitive_word: return masked_str elif not isinstance(data, (int, float)): - return 'Class:' + data.__class__.__name__ + return "Class:" + data.__class__.__name__ return data @@ -97,17 +97,18 @@ def apm_decorator(func): @functools.wraps(func) def func_wrappers(*args, **kwargs): m = monitor.Monitor() - func_attr = {'function_name': func.__name__, - 'positional_args': str(mask_credentials(args))[0:max_length], - 'keyword_arguments': str(mask_credentials(kwargs))[0:max_length]} - ev = {'action': 'invoke'} + func_attr = { + "function_name": func.__name__, + "positional_args": str(mask_credentials(args))[0:max_length], + "keyword_arguments": str(mask_credentials(kwargs))[0:max_length], + } + ev = {"action": "invoke"} ev.update(func_attr) m.write_event(ev, tags) before_invoke = int(time.time() * 1000) ret = func(*args, **kwargs) after_invoke = int(time.time() * 1000) - ev = {'action': 'done', - 'time_cost': (after_invoke - before_invoke)} + ev = {"action": "done", "time_cost": (after_invoke - before_invoke)} ev.update(func_attr) m.write_event(ev, tags) return ret diff --git a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/monitor.py b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/monitor.py index ee9d017bc..f4206a33d 100644 --- a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/monitor.py +++ b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/monitor.py @@ -24,15 +24,16 @@ from .metric_exception import MetricException from .number_metric_collector import NumberMetricCollector -__all__ = ['Monitor'] +__all__ = ["Monitor"] + +logger = log.Logs().get_logger("metric_collector") -logger = log.Logs().get_logger('metric_collector') class Monitor(with_metaclass(pattern.Singleton, object)): EVENT_WRITERS = { - 'memory': memory_event_writer.MemoryEventWriter, - 'file': event_writer.FileEventWriter, - 'hec': event_writer.FileEventWriter # TODO: implement a HEC writer + "memory": memory_event_writer.MemoryEventWriter, + "file": event_writer.FileEventWriter, + "hec": event_writer.FileEventWriter, # TODO: implement a HEC writer } def __init__(self): @@ -43,45 +44,48 @@ def __init__(self): self.worker_start_lock = threading.Lock() def configure(self, conf, force_update=False): - ''' + """ conf is a dict. -- app: the app name -- event_writer: the type of event writer. 
-- writer_config: a dict which contains the configuration for the event writer force_update: when this is True, update the config of monitor - ''' + """ if self.number_metric_collector is not None and force_update is False: - logger.info('monitor has been initialized.') + logger.info("monitor has been initialized.") return self - self._app = conf.get('app', self._app) + self._app = conf.get("app", self._app) if not self._app: - raise MetricException('app is not found in configuration object.') + raise MetricException("app is not found in configuration object.") - event_writer = str.lower(conf.get('event_writer', 'file')) + event_writer = str.lower(conf.get("event_writer", "file")) if self._ewriter_type != event_writer: if event_writer not in self.EVENT_WRITERS: - raise MetricException('Unknown event writer type:' + - event_writer) + raise MetricException("Unknown event writer type:" + event_writer) ewriter_cls = self.EVENT_WRITERS[event_writer] - writer_config = conf.get('writer_config', {}) + writer_config = conf.get("writer_config", {}) self._ewriter_type = event_writer self._ewriter = ewriter_cls(self._app, writer_config) else: - writer_config = conf.get('writer_config', {}) + writer_config = conf.get("writer_config", {}) if self._ewriter: self._ewriter.update_config(writer_config) else: - raise MetricException('event writer type is ' + - self._ewriter_type + - ', while event writer is None.') + raise MetricException( + "event writer type is " + + self._ewriter_type + + ", while event writer is None." + ) if self.number_metric_collector is None: self.number_metric_collector = NumberMetricCollector(self._ewriter) else: self.number_metric_collector.set_event_writer(self._ewriter) - logger.info('Initialize monitor successfully. app=%s, config:%s', self._app, conf) + logger.info( + "Initialize monitor successfully. 
app=%s, config:%s", self._app, conf + ) return self def start(self): @@ -90,9 +94,9 @@ def start(self): if self.number_metric_collector.is_stopped(): self.number_metric_collector.start() else: - logger.info('Collector thread has been started.') + logger.info("Collector thread has been started.") else: - raise ValueError('Monitor is not configued yet.') + raise ValueError("Monitor is not configued yet.") def stop(self): if self.number_metric_collector: @@ -109,14 +113,19 @@ def write_event(self, ev, tags=[]): if self._ewriter: self._ewriter.write_event(ev, tags) - def register_metric(self, - metric_name, - metric_type=NumberMetricCollector.SUM_METRIC, - metric_tags=[], - max_time_span=10): + def register_metric( + self, + metric_name, + metric_type=NumberMetricCollector.SUM_METRIC, + metric_tags=[], + max_time_span=10, + ): self.number_metric_collector.register_metric( - metric_name, metric_type, metric_tags, max_time_span) + metric_name, metric_type, metric_tags, max_time_span + ) def record_metric(self, metric_name, metric_value, metric_timestamp=None): if self.number_metric_collector: - self.number_metric_collector.record_metric(metric_name, metric_value, metric_timestamp) + self.number_metric_collector.record_metric( + metric_name, metric_value, metric_timestamp + ) diff --git a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/number_metric_collector.py b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/number_metric_collector.py index 837529ed5..febe5413d 100644 --- a/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/number_metric_collector.py +++ b/splunk_add_on_ucc_framework/alert_utils/alert_utils_common/metric_collector/number_metric_collector.py @@ -27,14 +27,14 @@ from . 
import metric_aggregator from .metric_exception import MetricException -__all__ = ['NumberMetricCollector'] +__all__ = ["NumberMetricCollector"] -logger = log.Logs().get_logger('metric_collector') +logger = log.Logs().get_logger("metric_collector") -class AggregatorWorker(threading.Thread): +class AggregatorWorker(threading.Thread): def __init__(self, collector): - super().__init__(name='AggregatorWorker') + super().__init__(name="AggregatorWorker") self.daemon = True self.collector = collector self.metric_aggregators = dict() @@ -48,8 +48,8 @@ def run(self): class NumberMetricCollector: - SUM_METRIC = 'sum' - AVG_METRIC = 'avg' + SUM_METRIC = "sum" + AVG_METRIC = "avg" def __init__(self, event_writer): self.event_writer = event_writer @@ -66,46 +66,48 @@ def is_stopped(self): else: return not self.worker.is_alive() - def register_metric(self, - metric_name, - metric_type=SUM_METRIC, - metric_tags=[], - max_time_span=10): + def register_metric( + self, metric_name, metric_type=SUM_METRIC, metric_tags=[], max_time_span=10 + ): if metric_name in self.aggregators: - raise MetricException('metric {} has been registered.'.format( - metric_name)) + raise MetricException("metric {} has been registered.".format(metric_name)) if metric_type == self.AVG_METRIC: - self.aggregators[ - metric_name] = metric_aggregator.NumberMetricAverage( - metric_name, self.event_writer, metric_tags, max_time_span) + self.aggregators[metric_name] = metric_aggregator.NumberMetricAverage( + metric_name, self.event_writer, metric_tags, max_time_span + ) elif metric_type == self.SUM_METRIC: self.aggregators[metric_name] = metric_aggregator.NumberMetricSum( - metric_name, self.event_writer, metric_tags, max_time_span) + metric_name, self.event_writer, metric_tags, max_time_span + ) else: - raise MetricException('Metric type {} is unsupported.'.format( - metric_type)) + raise MetricException("Metric type {} is unsupported.".format(metric_type)) def record_metric(self, metric_name, metric_value, metric_timestamp=None): if metric_name not in self.aggregators: - raise MetricException('metric {} is not registered yet.'.format( - metric_name)) + raise MetricException( + "metric {} is not registered yet.".format(metric_name) + ) if metric_timestamp is None: metric_timestamp = int(time.time()) - self.record_queue.put({'type': 'metric', - 'ts': metric_timestamp, - 'v': metric_value, - 'n': metric_name}) + self.record_queue.put( + { + "type": "metric", + "ts": metric_timestamp, + "v": metric_value, + "n": metric_name, + } + ) def start(self): if self.is_stopped() == False: - raise RuntimeError('collector worker has been started.') + raise RuntimeError("collector worker has been started.") if self.worker: - raise RuntimeError('Worker thread is stopped, but the worker is not None.') + raise RuntimeError("Worker thread is stopped, but the worker is not None.") self.worker = AggregatorWorker(self) self.worker.start() def stop(self, graceful=True): - self.record_queue.put({'type': 'stop'}) + self.record_queue.put({"type": "stop"}) if graceful: self.join() self.worker = None @@ -115,9 +117,9 @@ def join(self): self.worker.join() def process(self): - ''' + """ the entry point for the worker thread - ''' + """ is_stop = False skip_metrics = [] record = None @@ -126,10 +128,10 @@ def process(self): except queue.Empty: pass if record: - if record['type'] == 'stop': + if record["type"] == "stop": is_stop = True else: - metric_name = record['n'] + metric_name = record["n"] self.aggregators[metric_name].aggregate_metric(record) 
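# the metric name just aggregated is kept in skip_metrics for the _flush_events call below
                 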
skip_metrics.append(metric_name) self._flush_events(skip_metrics) diff --git a/splunk_add_on_ucc_framework/modular_alert_builder/__init__.py b/splunk_add_on_ucc_framework/modular_alert_builder/__init__.py index b9ea4650a..46020c32c 100644 --- a/splunk_add_on_ucc_framework/modular_alert_builder/__init__.py +++ b/splunk_add_on_ucc_framework/modular_alert_builder/__init__.py @@ -14,4 +14,4 @@ # limitations under the License. # # -# SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +# SPDX-License-Identifier: Apache-2.0 diff --git a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/__init__.py b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/__init__.py index 3503ae9f8..ad3dd86d3 100644 --- a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/__init__.py +++ b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/__init__.py @@ -41,9 +41,9 @@ def check_file_name(file_name, env): for gp in search: if gp in env: new_str = new_str.replace("${%s}" % gp, env[gp], re.MULTILINE) - new_str = new_str.replace("${!%s}" % gp, - re.sub(r"[^\w]+", "_", env[gp].lower()), - re.MULTILINE) + new_str = new_str.replace( + "${!%s}" % gp, re.sub(r"[^\w]+", "_", env[gp].lower()), re.MULTILINE + ) # Disable the cache to avoid conflict # cache_path[file_name] = new_str @@ -73,23 +73,21 @@ def prepare_ta_directory_tree(src, dest, logger, envs): return output_dir if os.path.exists(output_dir): - logger.info('event="output_dir=%s already exist"', - output_dir) - output_dir = os.path.join(output_dir, - envs["product_id"] + "_temp_output") + logger.info('event="output_dir=%s already exist"', output_dir) + output_dir = os.path.join(output_dir, envs["product_id"] + "_temp_output") logger.info('event="generate a new output_dir=%s"', output_dir) if os.path.exists(output_dir): shutil.rmtree(output_dir) try: # copy file - logger.info('event="Copying directory tree: src=%s dest=%s"', - src, output_dir) + logger.info('event="Copying directory tree: src=%s dest=%s"', src, output_dir) shutil.copytree(src, output_dir) # process each file's name - logger.info('event="Replace each file name\'s placeholder under dir=%s"', - output_dir) + logger.info( + 'event="Replace each file name\'s placeholder under dir=%s"', output_dir + ) move_list = [] for dirName, subdirList, fileList in os.walk(output_dir): move_list.extend(check_file_list(dirName, subdirList, envs)) @@ -106,8 +104,7 @@ def prepare_ta_directory_tree(src, dest, logger, envs): return output_dir -def generate_alerts(src, dest, logger, envs, process_list=None, - skip_list=None): +def generate_alerts(src, dest, logger, envs, process_list=None, skip_list=None): process_list = process_list or [] skip_list = skip_list or [] output_dir = dest @@ -129,21 +126,23 @@ def generate_alerts(src, dest, logger, envs, process_list=None, input_setting=envs["schema.content"], package_path=package_dir, logger=logger, - global_settings=global_settings) + global_settings=global_settings, + ) if build_components["html"]: html_return = generate_alert_actions_html_files( input_setting=envs["schema.content"], package_path=package_dir, logger=logger, - html_setting=envs["html_setting"]) + html_setting=envs["html_setting"], + ) if build_components["py"]: py_return = generate_alert_actions_py_files( input_setting=envs["schema.content"], package_path=package_dir, logger=logger, - global_settings=global_settings + global_settings=global_settings, ) if conf_return: @@ -158,13 +157,15 @@ def generate_alerts(src, dest, logger, envs, process_list=None, Which 
means the previous output_dir already there """ from . import alert_actions_merge + alert_actions_merge.merge( os.path.join(output_dir, envs["product_id"]), - os.path.join(dest, envs["product_id"])) + os.path.join(dest, envs["product_id"]), + ) logger.info('event="merged %s to %s', output_dir, dest) finally: if output_dir != dest and os.path.exists(output_dir): logger.info('clean temp_output_dir="%s"', output_dir) shutil.rmtree(output_dir) - return output_content \ No newline at end of file + return output_content diff --git a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_base.py b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_base.py index 9b6716846..dbfd0d03f 100644 --- a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_base.py +++ b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_base.py @@ -15,7 +15,6 @@ # - import csv import gzip import sys @@ -37,8 +36,7 @@ def __init__(self, alert_name): # self._logger_name = "modalert_" + alert_name self._logger_name = alert_name + "_modalert" self._logger = Logs().get_logger(self._logger_name) - super().__init__( - sys.stdin.read(), self._logger, alert_name) + super().__init__(sys.stdin.read(), self._logger, alert_name) def log_error(self, msg): self._logger.error(msg) @@ -62,12 +60,12 @@ def run(self, argv): sys.exit(1) try: - with gzip.open(self.results_file, 'rb') as rh: + with gzip.open(self.results_file, "rb") as rh: for num, result in enumerate(csv.DictReader(rh)): - result.setdefault('rid', str(num)) + result.setdefault("rid", str(num)) self.update(result) self.dowork(result) except Exception as e: - self._logger.error(self.message(e, 'failure')) + self._logger.error(self.message(e, "failure")) self._logger.exception("exception=") sys.exit(2) diff --git a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_conf_gen.py b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_conf_gen.py index e37e0b5ea..f9a6853c6 100644 --- a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_conf_gen.py +++ b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_conf_gen.py @@ -32,9 +32,16 @@ class AlertActionsConfBase: - def __init__(self, input_setting=None, package_path=None, logger=None, - template_dir=None, default_settings_file=None, - global_settings=None, **kwargs): + def __init__( + self, + input_setting=None, + package_path=None, + logger=None, + template_dir=None, + default_settings_file=None, + global_settings=None, + **kwargs + ): self._alert_conf_name = "alert_actions.conf" self._alert_spec_name = "alert_actions.conf.spec" self._eventtypes_conf = "eventtypes.conf" @@ -81,8 +88,10 @@ def get_icon_name(self, alert): return "alert_" + alert[ac.SHORT_NAME] + ".png" def get_icon_path(self, alert, create_dir_path=True): - return op.join(self.get_icon_dir(create_dir_path=create_dir_path), - self.get_icon_name(alert)) + return op.join( + self.get_icon_dir(create_dir_path=create_dir_path), + self.get_icon_name(alert), + ) class AlertActionsConfGeneration(AlertActionsConfBase): @@ -94,32 +103,49 @@ class AlertActionsConfGeneration(AlertActionsConfBase): DEFAULT_TAGS_TEMPLATE = "tags.conf.template" DEFAULT_APP_TEMPLATE = "app.conf.template" - def __init__(self, input_setting=None, package_path=None, logger=None, - template_dir=None, default_settings_file=None, **kwargs): + def __init__( + self, + input_setting=None, + package_path=None, + logger=None, + 
template_dir=None, + default_settings_file=None, + **kwargs + ): if not input_setting or not logger: msg = 'status="failed", required_args="input_setting, logger"' raise aae.AlertActionsInValidArgs(msg) super().__init__( input_setting=input_setting, - package_path=package_path, logger=logger, template_dir=template_dir, + package_path=package_path, + logger=logger, + template_dir=template_dir, default_settings_file=default_settings_file, **kwargs ) - self._html_fields = [ac.PARAMETERS, ] + self._html_fields = [ + ac.PARAMETERS, + ] self._remove_fields = [ac.SHORT_NAME] + self._html_fields self._temp_obj = AlertActionsTemplateMgr(template_dir) - self._default_settings_file = default_settings_file or \ - op.join(op.dirname(op.abspath(__file__)), - AlertActionsConfGeneration.DEFAULT_SETTINGS_FILE) + self._default_settings_file = default_settings_file or op.join( + op.dirname(op.abspath(__file__)), + AlertActionsConfGeneration.DEFAULT_SETTINGS_FILE, + ) def generate_conf(self): - self._logger.info('status="starting", operation="generate", ' + - 'object="alert_actions.conf", object_type="file"') - template = Template(filename=op.join( - self._temp_obj.get_template_dir(), - AlertActionsConfGeneration.DEFAULT_CONF_TEMPLATE)) + self._logger.info( + 'status="starting", operation="generate", ' + + 'object="alert_actions.conf", object_type="file"' + ) + template = Template( + filename=op.join( + self._temp_obj.get_template_dir(), + AlertActionsConfGeneration.DEFAULT_CONF_TEMPLATE, + ) + ) alert_obj = Munch.fromDict(self._alert_settings) try: final_string = template.render(mod_alerts=alert_obj) @@ -127,76 +153,91 @@ def generate_conf(self): print(exceptions.html_error_template().render()) raise text = linesep.join([s.strip() for s in final_string.splitlines()]) - write_file(self._alert_conf_name, - self.get_local_conf_file_path(), - text, - self._logger) - self._logger.info('status="success", operation="generate", ' + - 'object="alert_actions.conf", object_type="file"') + write_file( + self._alert_conf_name, self.get_local_conf_file_path(), text, self._logger + ) + self._logger.info( + 'status="success", operation="generate", ' + + 'object="alert_actions.conf", object_type="file"' + ) def generate_eventtypes(self): - self._logger.info('status="starting", operation="generate", ' + - 'object="eventtypes.conf", object_type="file"') - template = Template(filename=op.join( - self._temp_obj.get_template_dir(), - AlertActionsConfGeneration.DEFAULT_EVENTTYPES_TEMPLATE)) + self._logger.info( + 'status="starting", operation="generate", ' + + 'object="eventtypes.conf", object_type="file"' + ) + template = Template( + filename=op.join( + self._temp_obj.get_template_dir(), + AlertActionsConfGeneration.DEFAULT_EVENTTYPES_TEMPLATE, + ) + ) alert_obj = Munch.fromDict(self._alert_settings) final_string = template.render(mod_alerts=alert_obj) text = linesep.join([s.strip() for s in final_string.splitlines()]) file_path = self.get_local_conf_file_path(conf_name=self._eventtypes_conf) - write_file(self._eventtypes_conf, - file_path, - text, - self._logger) + write_file(self._eventtypes_conf, file_path, text, self._logger) # remove the stanza if not checked for alert in self._alert_settings: - if alert.get("active_response") and alert["active_response"].get("sourcetype"): + if alert.get("active_response") and alert["active_response"].get( + "sourcetype" + ): continue remove_alert_from_conf_file(alert, file_path, self._logger) - self._logger.info('status="success", operation="generate", ' + - 'object="eventtypes.conf", 
object_type="file"') - + self._logger.info( + 'status="success", operation="generate", ' + + 'object="eventtypes.conf", object_type="file"' + ) def generate_tags(self): - self._logger.info('status="starting", operation="generate", ' + - 'object="tags.conf", object_type="file"') - template = Template(filename=op.join( - self._temp_obj.get_template_dir(), - AlertActionsConfGeneration.DEFAULT_TAGS_TEMPLATE)) + self._logger.info( + 'status="starting", operation="generate", ' + + 'object="tags.conf", object_type="file"' + ) + template = Template( + filename=op.join( + self._temp_obj.get_template_dir(), + AlertActionsConfGeneration.DEFAULT_TAGS_TEMPLATE, + ) + ) alert_obj = Munch.fromDict(self._alert_settings) final_string = template.render(mod_alerts=alert_obj) text = linesep.join([s.strip() for s in final_string.splitlines()]) file_path = self.get_local_conf_file_path(conf_name=self._tags_conf) - write_file(self._tags_conf, - file_path, - text, - self._logger) + write_file(self._tags_conf, file_path, text, self._logger) # remove the stanza if not checked for alert in self._alert_settings: - if alert.get("active_response") and alert["active_response"].get("sourcetype"): + if alert.get("active_response") and alert["active_response"].get( + "sourcetype" + ): continue remove_alert_from_conf_file(alert, file_path, self._logger) - self._logger.info('status="success", operation="generate", ' + - 'object="tags.conf", object_type="file"') + self._logger.info( + 'status="success", operation="generate", ' + + 'object="tags.conf", object_type="file"' + ) def generate_spec(self): - self._logger.info('status="starting", operation="generate", ' + - 'object="alert_actions.conf.spec", object_type="file"') - template = Template(filename=op.join( - self._temp_obj.get_template_dir(), - AlertActionsConfGeneration.DEFAULT_SPEC_TEMPLATE)) + self._logger.info( + 'status="starting", operation="generate", ' + + 'object="alert_actions.conf.spec", object_type="file"' + ) + template = Template( + filename=op.join( + self._temp_obj.get_template_dir(), + AlertActionsConfGeneration.DEFAULT_SPEC_TEMPLATE, + ) + ) alert_obj = Munch.fromDict(self._alert_settings) final_string = template.render(mod_alerts=alert_obj) text = linesep.join([s.strip() for s in final_string.splitlines()]) - write_file(self._alert_spec_name, - self.get_spec_file_path(), - text, - self._logger) - self._logger.info('status="success", operation="generate", ' + - 'object="alert_actions.conf.spec", object_type="file"') - + write_file(self._alert_spec_name, self.get_spec_file_path(), text, self._logger) + self._logger.info( + 'status="success", operation="generate", ' + + 'object="alert_actions.conf.spec", object_type="file"' + ) def handle(self): self.add_default_settings() @@ -207,7 +248,6 @@ def handle(self): self.generate_tags() # self.generate_app_conf() - def add_default_settings(self): default_settings = None with open(self._default_settings_file) as df: @@ -221,15 +261,19 @@ def add_default_settings(self): continue alert[ac.ALERT_PROPS][k] = v - self._logger.info('status="success", operation="Add default setting", alert_name="%s", "%s"="%s"', - alert[ac.SHORT_NAME], k, v) - - -def generate_alert_actions_conf(input_setting=None, package_path=None, - logger=None, **kwargs): - obj = AlertActionsConfGeneration(input_setting=input_setting, - package_path=package_path, - logger=logger, - **kwargs) + self._logger.info( + 'status="success", operation="Add default setting", alert_name="%s", "%s"="%s"', + alert[ac.SHORT_NAME], + k, + v, + ) + + +def 
generate_alert_actions_conf( + input_setting=None, package_path=None, logger=None, **kwargs +): + obj = AlertActionsConfGeneration( + input_setting=input_setting, package_path=package_path, logger=logger, **kwargs + ) obj.handle() return None diff --git a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_exceptions.py b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_exceptions.py index cab5915fa..7f4ce1585 100644 --- a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_exceptions.py +++ b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_exceptions.py @@ -14,6 +14,7 @@ # limitations under the License. # + class AlertParameterInputSettingNotImplemented(Exception): pass diff --git a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_helper.py b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_helper.py index 8f44cc34c..e512eebf1 100644 --- a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_helper.py +++ b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_helper.py @@ -27,15 +27,17 @@ def write_file(file_name, file_path, content, logger, merge="stanza_overwrite"): - logger.debug('operation="write", object="%s" object_type="file"', - file_path) + logger.debug('operation="write", object="%s" object_type="file"', file_path) do_merge = False - if file_name.endswith('.conf') or file_name.endswith('conf.spec'): + if file_name.endswith(".conf") or file_name.endswith("conf.spec"): do_merge = True else: - logger.info('event="Will not merge file="%s", ' + - 'reason="Only support conf file merge"', file_path) + logger.info( + 'event="Will not merge file="%s", ' + + 'reason="Only support conf file merge"', + file_path, + ) if file_path: new_file = None @@ -43,7 +45,7 @@ def write_file(file_name, file_path, content, logger, merge="stanza_overwrite"): new_file = op.join(op.dirname(file_path), "new_" + file_name) if new_file: try: - with open(new_file, 'w+') as fhandler: + with open(new_file, "w+") as fhandler: fhandler.write(content) merge_conf_file(new_file, file_path, merge) finally: @@ -52,29 +54,27 @@ def write_file(file_name, file_path, content, logger, merge="stanza_overwrite"): else: if not op.exists(op.dirname(file_path)): makedirs(op.dirname(file_path)) - with open(file_path, 'w+') as fhandler: + with open(file_path, "w+") as fhandler: fhandler.write(content) if do_merge: # need to process the file with conf parser parser = TABConfigParser() parser.read(file_path) - with open(file_path, 'w') as df: + with open(file_path, "w") as df: parser.write(df) else: - sys.stdout.write("\n##################File {}##################\n".format(file_name)) + sys.stdout.write( + "\n##################File {}##################\n".format(file_name) + ) sys.stdout.write(content) -GLOBAL_SETTING_TYPE_MAP = { - "text": "text", - "checkbox": "bool", - "password": "password" -} +GLOBAL_SETTING_TYPE_MAP = {"text": "text", "checkbox": "bool", "password": "password"} GLOBAL_SETTING_VALUE_NAME_MAP = { "text": "content", "bool": "bool", - "password": "password" + "password": "password", } @@ -109,14 +109,15 @@ def convert_custom_setting(parameters): for param in parameters: if param.get("format_type") not in list(GLOBAL_SETTING_TYPE_MAP.keys()): msg = 'format_type="{}" is not support for global setting'.format( - param.get("format_type")) + param.get("format_type") + ) raise Exception(msg) one_param = { "title": 
param.get("label"), "name": param.get("name"), "type": GLOBAL_SETTING_TYPE_MAP[param.get("format_type")], - "description": param.get("help_string") + "description": param.get("help_string"), } formated.append(one_param) @@ -125,54 +126,54 @@ def convert_custom_setting(parameters): def convert_global_setting(global_settings): """ - convert - { - "customized_settings": { - "string_label": { - "type": "text", - "content": "string" - }, - "password": { - "type": "password", - "password": "123" - }, - "checkbox": { - "type": "bool", - "bool": true - } - }, - "proxy_settings": { - "proxy_password": "sef", - "proxy_type": "http", - "proxy_url": "1.2.3.4", - }, - "global_settings": { - "log_level": "INFO" - } - to - { - "title": "Proxy", - "name": "proxy", - "type": "default_proxy", - "description": "proxy settings" - }, - { - "title": "Account Key Title", - "name": "username", - "type": "default_account", - "description": "The username of the user account" - }, - { - "title": "Account Secret Title", - "name": "password", - "type": "default_account", - "description": "The password of the user account" - }, - { - "title": "customized key", - "name": "customized name", - "type": "text", - "description": "description of customized key" + convert + { + "customized_settings": { + "string_label": { + "type": "text", + "content": "string" + }, + "password": { + "type": "password", + "password": "123" + }, + "checkbox": { + "type": "bool", + "bool": true + } + }, + "proxy_settings": { + "proxy_password": "sef", + "proxy_type": "http", + "proxy_url": "1.2.3.4", + }, + "global_settings": { + "log_level": "INFO" + } + to + { + "title": "Proxy", + "name": "proxy", + "type": "default_proxy", + "description": "proxy settings" + }, + { + "title": "Account Key Title", + "name": "username", + "type": "default_account", + "description": "The username of the user account" + }, + { + "title": "Account Secret Title", + "name": "password", + "type": "default_account", + "description": "The password of the user account" + }, + { + "title": "customized key", + "name": "customized name", + "type": "text", + "description": "description of customized key" """ converted = [] if not global_settings: @@ -184,27 +185,30 @@ def convert_global_setting(global_settings): "title": "Proxy", "name": "proxy", "type": "default_proxy", - "description": "proxy settings"} + "description": "proxy settings", + } converted.append(proxy) elif type == "log_settings": logging = { "title": "Logging", "name": "logging", "type": "default_logging", - "description": "logging setting"} + "description": "logging setting", + } converted.append(logging) elif type == "credential_settings": username = { "title": "Account Key Title", "name": "tab_default_account_username", "type": "default_account", - "description": "The username of the user account" + "description": "The username of the user account", } password = { "title": "Account Secret Title", "name": "tab_default_account_password", "type": "default_account", - "description": "The password of the user account"} + "description": "The password of the user account", + } converted.append(username) converted.append(password) elif type == "customized_settings": @@ -213,50 +217,49 @@ def convert_global_setting(global_settings): return converted - def convert_global_setting_previous(global_settings): """ - convert global_settings=[ + convert global_settings=[ + { + "type": "proxy" + }, + { + "type": "logging" + }, + { + "type": "account" + }, + { + "type": "custom", + "parameters": [] + } + ] + to [ { - 
"type": "proxy" + "title": "Proxy", + "name": "proxy", + "type": "default_proxy", + "description": "proxy settings" }, { - "type": "logging" + "title": "Account Key Title", + "name": "username", + "type": "default_account", + "description": "The username of the user account" }, { - "type": "account" + "title": "Account Secret Title", + "name": "password", + "type": "default_account", + "description": "The password of the user account" }, { - "type": "custom", - "parameters": [] + "title": "customized key", + "name": "customized name", + "type": "text", + "description": "description of customized key" } ] - to [ - { - "title": "Proxy", - "name": "proxy", - "type": "default_proxy", - "description": "proxy settings" - }, - { - "title": "Account Key Title", - "name": "username", - "type": "default_account", - "description": "The username of the user account" - }, - { - "title": "Account Secret Title", - "name": "password", - "type": "default_account", - "description": "The password of the user account" - }, - { - "title": "customized key", - "name": "customized name", - "type": "text", - "description": "description of customized key" - } -] """ converted = [] if not global_settings: @@ -268,27 +271,30 @@ def convert_global_setting_previous(global_settings): "title": "Proxy", "name": "proxy", "type": "default_proxy", - "description": "proxy settings"} + "description": "proxy settings", + } converted.append(proxy) elif setting.get("type") == "logging": logging = { "title": "Logging", "name": "logging", "type": "default_logging", - "description": "logging setting"} + "description": "logging setting", + } converted.append(logging) elif setting.get("type") == "account": username = { "title": "Account Key Title", "name": "username", "type": "default_account", - "description": "The username of the user account" + "description": "The username of the user account", } password = { "title": "Account Secret Title", "name": "password", "type": "default_account", - "description": "The password of the user account"} + "description": "The password of the user account", + } converted.append(username) converted.append(password) elif setting.get("type") == "custom": @@ -324,35 +330,35 @@ def get_parameter_type(param, parameters_meta, logger): def convert_test_global_settings(test_global_settings, logger): """ - convert to: -{ - "customized_settings": { - "string_label": { - "type": "text", - "content": "string" - }, - "password": { - "type": "password", - "password": "123" - }, - "checkbox": { - "type": "bool", - "bool": true + convert to: + { + "customized_settings": { + "string_label": { + "type": "text", + "content": "string" + }, + "password": { + "type": "password", + "password": "123" + }, + "checkbox": { + "type": "bool", + "bool": true + } + }, + "proxy_settings": { + "proxy_password": "sef", + "proxy_type": "http", + "proxy_url": "1.2.3.4", + "proxy_rdns": "0", + "proxy_username": "sdf", + "proxy_port": "34", + "proxy_enabled": "1" + }, + "global_settings": { + "log_level": "INFO" + } } - }, - "proxy_settings": { - "proxy_password": "sef", - "proxy_type": "http", - "proxy_url": "1.2.3.4", - "proxy_rdns": "0", - "proxy_username": "sdf", - "proxy_port": "34", - "proxy_enabled": "1" - }, - "global_settings": { - "log_level": "INFO" - } -} """ if not test_global_settings: logger.info('test_global_settings="%s"', test_global_settings) @@ -365,12 +371,12 @@ def convert_test_global_settings(test_global_settings, logger): for setting in settings: type = get_test_parameter_type(setting) if not type: - msg = 'No 
type for {} in customized_settings'.format(setting) + msg = "No type for {} in customized_settings".format(setting) raise NotImplementedError(msg) converted["customized_settings"][setting["name"]] = { "type": type, - GLOBAL_SETTING_VALUE_NAME_MAP[type]: setting.get("value") + GLOBAL_SETTING_VALUE_NAME_MAP[type]: setting.get("value"), } elif type == "log_settings": converted["global_settings"] = settings diff --git a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_html_gen.py b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_html_gen.py index 5b870e8f7..6af2746c5 100644 --- a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_html_gen.py +++ b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_html_gen.py @@ -33,7 +33,6 @@ class AlertHtmlBase: - def __init__(self, input_setting=None, package_path=None, logger=None): self._all_setting = input_setting self._logger = logger @@ -47,60 +46,71 @@ def get_alert_html_path(self): if not self._package_path: return None - html_path = op.join(self._package_path, "default", "data", "ui", - "alerts") + html_path = op.join(self._package_path, "default", "data", "ui", "alerts") if not op.exists(html_path): os.makedirs(html_path) - return op.join(html_path, - self.get_alert_html_name()) + return op.join(html_path, self.get_alert_html_name()) class AlertHtmlGenerator(AlertHtmlBase): DEFAULT_TEMPLATE_HTML = "mod_alert.html.template" DEFAULT_HOME_HTML = "default.html" - def __init__(self, input_setting=None, package_path=None, logger=None, - html_template=None, - html_home=None, - html_theme=None): - super().__init__(input_setting, package_path, - logger) + def __init__( + self, + input_setting=None, + package_path=None, + logger=None, + html_template=None, + html_home=None, + html_theme=None, + ): + super().__init__(input_setting, package_path, logger) if not input_setting or not logger: msg = 'required_args="input_setting, logger"' raise aae.AlertActionsInValidArgs(msg) self._alert_actions_setting = input_setting[ac.MODULAR_ALERTS] self._template = None - self._html_template = html_template or \ - AlertHtmlGenerator.DEFAULT_TEMPLATE_HTML + self._html_template = html_template or AlertHtmlGenerator.DEFAULT_TEMPLATE_HTML self._html_home = html_home or AlertHtmlGenerator.DEFAULT_HOME_HTML self._temp_obj = AlertActionsTemplateMgr(html_theme=html_theme) self._html_theme = self._temp_obj.get_html_lookup_dir() - self._logger.info('html_theme="%s" html_template="%s", html_home="%s"', - self._html_theme, - self._html_template, - self._html_home) + self._logger.info( + 'html_theme="%s" html_template="%s", html_home="%s"', + self._html_theme, + self._html_template, + self._html_home, + ) def handle_one_alert(self, one_alert_setting): self._current_alert = one_alert_setting alert_obj = Munch.fromDict(one_alert_setting) - final_form = self._template.render(mod_alert=alert_obj, - home_page=self._html_home) + final_form = self._template.render( + mod_alert=alert_obj, home_page=self._html_home + ) final_form = defused_lxml.fromstring(final_form) -# Checking python version before converting and encoding XML Tree to string. + # Checking python version before converting and encoding XML Tree to string. 
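The version branch itself is gone; tostring() with an
+        # encoding set returns bytes, which are decoded back to str below.
         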
final_string = defused_lxml.tostring( - final_form, encoding='utf-8', pretty_print=True) + final_form, encoding="utf-8", pretty_print=True + ) text = linesep.join( - [s for s in final_string.decode('utf-8').splitlines() if not search(r'^\s*$', s)]) - - self._logger.debug('operation="Write", object_type="File", object="%s"', - self.get_alert_html_path()) - - write_file(self.get_alert_html_name(), - self.get_alert_html_path(), - text, - self._logger) + [ + s + for s in final_string.decode("utf-8").splitlines() + if not search(r"^\s*$", s) + ] + ) + + self._logger.debug( + 'operation="Write", object_type="File", object="%s"', + self.get_alert_html_path(), + ) + + write_file( + self.get_alert_html_name(), self.get_alert_html_path(), text, self._logger + ) def handle(self): self._logger.info("html_theme=%s", self._html_theme) @@ -108,15 +118,15 @@ def handle(self): template_text = None template_path = self._html_template if not op.isabs(self._html_template): - template_path = op.join(self._temp_obj.get_template_dir(), - self._html_template) + template_path = op.join( + self._temp_obj.get_template_dir(), self._html_template + ) self._logger.debug("Reading template_file=%s", template_path) with open(template_path) as tp: template_text = tp.read() - self._template = Template(text=template_text, - lookup=tmp_lookup) + self._template = Template(text=template_text, lookup=tmp_lookup) self._logger.info("Start to generate alert actions html files") for alert in self._alert_actions_setting: @@ -124,9 +134,9 @@ def handle(self): self._logger.info("Finished generating alert actions html files") -def generate_alert_actions_html_files(input_setting=None, package_path=None, - logger=None, - html_setting=None): +def generate_alert_actions_html_files( + input_setting=None, package_path=None, logger=None, html_setting=None +): html_template = None html_home = None html_theme = None @@ -134,12 +144,13 @@ def generate_alert_actions_html_files(input_setting=None, package_path=None, html_template = html_setting.get("html_template") html_home = html_setting.get("html_home") html_theme = html_setting.get("html_theme") - html_gen = AlertHtmlGenerator(input_setting=input_setting, - package_path=package_path, - logger=logger, - html_template=html_template, - html_home=html_home, - html_theme=html_theme - ) + html_gen = AlertHtmlGenerator( + input_setting=input_setting, + package_path=package_path, + logger=logger, + html_template=html_template, + html_home=html_home, + html_theme=html_theme, + ) html_gen.handle() return None diff --git a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_merge.py b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_merge.py index a8752de3b..254ce5eb1 100644 --- a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_merge.py +++ b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_merge.py @@ -27,10 +27,8 @@ from . import alert_actions_exceptions as aae from . 
import arf_consts as ac

-merge_deny_list = ['default.meta', 'README.txt']
-merge_mode_config = {
-    "app.conf": "item_overwrite"
-}
+merge_deny_list = ["default.meta", "README.txt"]
+merge_mode_config = {"app.conf": "item_overwrite"}


 def remove_alert_from_conf_file(alert, conf_file, logger):
@@ -39,7 +37,9 @@ def remove_alert_from_conf_file(alert, conf_file, logger):
         return

     if not isinstance(alert, dict):
-        msg = 'alert="{}", event="alert is not a dict, don\'t remove anything form file {}"'.format(alert, conf_file)
+        msg = 'alert="{}", event="alert is not a dict, don\'t remove anything from file {}"'.format(
+            alert, conf_file
+        )
         raise aae.AlertCleaningFormatFailure(msg)

     parser = TABConfigParser()
@@ -47,12 +47,17 @@
     conf_dict = parser.item_dict()

     for stanza, key_values in list(conf_dict.items()):
-        if stanza == alert[ac.SHORT_NAME] or \
-                stanza == alert[ac.SHORT_NAME] + "_modaction_result" or \
-                stanza == "eventtype=" + alert[ac.SHORT_NAME] + "_modaction_result":
-            logger.info('alert="%s", conf_file="%s", stanza="%s"',
-                        alert[ac.SHORT_NAME],
-                        conf_file, stanza)
+        if (
+            stanza == alert[ac.SHORT_NAME]
+            or stanza == alert[ac.SHORT_NAME] + "_modaction_result"
+            or stanza == "eventtype=" + alert[ac.SHORT_NAME] + "_modaction_result"
+        ):
+            logger.info(
+                'alert="%s", conf_file="%s", stanza="%s"',
+                alert[ac.SHORT_NAME],
+                conf_file,
+                stanza,
+            )
             parser.remove_section(stanza)

     with open(conf_file, "w") as cf:
@@ -79,7 +84,7 @@
             if stanza not in dst_dict:
                 parser.add_section(stanza)
             else:
-                parser.remove_section(stanza,false)
+                parser.remove_section(stanza, False)
                 parser.add_section(stanza)

             for k, v in list(key_values.items()):
@@ -118,14 +123,16 @@
             if file.endswith("pyo") or file.endswith("pyc"):
                 continue

-            if file in dst_files and (file.endswith('.conf') or file.endswith('.conf.spec')):
+            if file in dst_files and (
+                file.endswith(".conf") or file.endswith(".conf.spec")
+            ):
                 if file in list(merge_mode_config.keys()):
                     merge_mode = merge_mode_config[file]
                 merge_conf_file(f_path, op.join(dst, file), merge_mode)
             else:
                 copy(f_path, dst)
         elif op.isdir(f_path):
-            if file.startswith('.'):
+            if file.startswith("."):
                 continue
             if file not in dst_files:
                 os.makedirs(op.join(dst, file))
diff --git a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_py_gen.py b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_py_gen.py
index 72cbf97b9..bd9ced793 100644
--- a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_py_gen.py
+++ b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_py_gen.py
@@ -29,10 +29,16 @@


 class AlertActionsPyBase:
-
-    def __init__(self, input_setting=None, package_path=None, logger=None,
-                 template_py=None, lookup_dir=None, global_settings=None,
-                 **kwargs):
+    def __init__(
+        self,
+        input_setting=None,
+        package_path=None,
+        logger=None,
+        template_py=None,
+        lookup_dir=None,
+        global_settings=None,
+        **kwargs
+    ):
         self._all_setting = input_setting
         self._logger = logger
         self._package_path = package_path
@@ -43,8 +49,8 @@ def __init__(self, input_setting=None, package_path=None, logger=None,
         self._lib_dir = self.get_python_lib_dir_name(self._ta_name)

     def get_python_lib_dir_name(self, app_name):
-        space_replace = re.compile(r'[^\w]+')
-        return space_replace.sub('_', app_name.lower())
+        space_replace = re.compile(r"[^\w]+")
+        
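# runs of non-word characters become "_" so the lowercased app name can serve as the lib dir
+        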
return space_replace.sub("_", app_name.lower()) def get_alert_py_name(self, helper=""): return self._current_alert[ac.SHORT_NAME] + helper + ".py" @@ -60,8 +66,9 @@ def get_alert_helper_py_name(self): def get_alert_helper_py_path(self): if not self._package_path: return None - return op.join(self._package_path, "bin", self._lib_dir, - self.get_alert_helper_py_name()) + return op.join( + self._package_path, "bin", self._lib_dir, self.get_alert_helper_py_name() + ) def get_declare_py_name(self): return self._lib_dir + "_declare.py" @@ -75,12 +82,12 @@ def get_decalre_py_path(self): def get_template_py_files(self): bin_dir = op.join(self._package_path, "bin") return [ -# op.join(bin_dir, self._lib_dir + "_declare.py"), + # op.join(bin_dir, self._lib_dir + "_declare.py"), op.join(bin_dir, self._lib_dir + "_declare.pyc"), op.join(bin_dir, self._lib_dir + "_declare.pyo"), op.join(bin_dir, self._lib_dir, "setup_util_helper.py"), op.join(bin_dir, self._lib_dir, "setup_util_helper.pyc"), - op.join(bin_dir, self._lib_dir, "setup_util_helper.pyo") + op.join(bin_dir, self._lib_dir, "setup_util_helper.pyo"), ] @@ -91,31 +98,44 @@ class AlertActionsPyGenerator(AlertActionsPyBase): CURRENT_DIR = op.dirname(op.abspath(__file__)) DEFAULT_LOOKUP_DIR = op.join(CURRENT_DIR, "default_py") - def __init__(self, input_setting=None, package_path=None, logger=None, - template_py=None, template_helper_py=None, - template_declare_py=None, lookup_dir=None, - global_settings=None, - **kwargs): + def __init__( + self, + input_setting=None, + package_path=None, + logger=None, + template_py=None, + template_helper_py=None, + template_declare_py=None, + lookup_dir=None, + global_settings=None, + **kwargs + ): if not input_setting or not logger: msg = 'required_args="input_setting, logger"' raise aae.AlertActionsInValidArgs(msg) super().__init__( - input_setting=input_setting, package_path=package_path, - logger=logger, template_py=template_py, lookup_dir=lookup_dir, - global_settings=global_settings, **kwargs) + input_setting=input_setting, + package_path=package_path, + logger=logger, + template_py=template_py, + lookup_dir=lookup_dir, + global_settings=global_settings, + **kwargs + ) self._temp_obj = AlertActionsTemplateMgr() self._template = None - self._template_py = template_py or \ - AlertActionsPyGenerator.DEFAULT_TEMPLATE_PY - self._template_helper_py = template_helper_py or \ - AlertActionsPyGenerator.DEFAULT_TEMPLATE_HELPER_PY - self._template_declare_py = template_declare_py or \ - AlertActionsPyGenerator.DEFAULT_TEMPLATE_DECLARE_PY - self._lookup_dir = lookup_dir or \ - AlertActionsPyGenerator.DEFAULT_LOOKUP_DIR - self._logger.info("template_py=%s lookup_dir=%s", self._template_py, - self._lookup_dir) + self._template_py = template_py or AlertActionsPyGenerator.DEFAULT_TEMPLATE_PY + self._template_helper_py = ( + template_helper_py or AlertActionsPyGenerator.DEFAULT_TEMPLATE_HELPER_PY + ) + self._template_declare_py = ( + template_declare_py or AlertActionsPyGenerator.DEFAULT_TEMPLATE_DECLARE_PY + ) + self._lookup_dir = lookup_dir or AlertActionsPyGenerator.DEFAULT_LOOKUP_DIR + self._logger.info( + "template_py=%s lookup_dir=%s", self._template_py, self._lookup_dir + ) self._output = {} self.other_setting = kwargs @@ -124,12 +144,11 @@ def merge_py_code(self, init, new): self._logger.info("No previous code, don't merge new parameters in") return new - start = r'\[sample_code_macro:start\]' - end = r'\[sample_code_macro:end\]' - start_str = '[sample_code_macro:start]' - end_str = '[sample_code_macro:end]' - 
pattern = re.compile(start + r'((.|[\r\n])*)' + end, - re.MULTILINE) + start = r"\[sample_code_macro:start\]" + end = r"\[sample_code_macro:end\]" + start_str = "[sample_code_macro:start]" + end_str = "[sample_code_macro:end]" + pattern = re.compile(start + r"((.|[\r\n])*)" + end, re.MULTILINE) matched = pattern.search(init) if not matched: self._logger.info("No macro anymore, don't merge new parameters in") @@ -138,16 +157,15 @@ def merge_py_code(self, init, new): matched = pattern.search(new) if matched: new_c = matched.group(1) - return re.sub(start + r'((.|[\r\n])*)' + end, - start_str + new_c + end_str, - init) + return re.sub( + start + r"((.|[\r\n])*)" + end, start_str + new_c + end_str, init + ) def gen_py_file(self, one_alert_setting): self._current_alert = one_alert_setting self.gen_main_py_file() self.gen_helper_py_file() - def gen_main_py_file(self): current_dir = op.dirname(op.abspath(__file__)) lookup_dir = op.join(current_dir, "default_py") @@ -155,8 +173,9 @@ def gen_main_py_file(self): template_path = self._template_py if not op.isabs(self._template_py): - template_path = op.join(self._temp_obj.get_template_dir(), - self._template_py) + template_path = op.join( + self._temp_obj.get_template_dir(), self._template_py + ) template = Template(filename=template_path, lookup=tmp_lookup) # start to render new py file @@ -168,15 +187,18 @@ def gen_main_py_file(self): lib_name=self._lib_dir, mod_alert=Munch.fromDict(self._current_alert), global_settings=Munch.fromDict(settings), - helper_name=op.splitext(self.get_alert_helper_py_name())[0] + helper_name=op.splitext(self.get_alert_helper_py_name())[0], ) - self._logger.debug('operation="Writing file", file="%s"', - self.get_alert_py_path()) - write_file(self.get_alert_py_name(), - self.get_alert_py_path(), - rendered_content, - self._logger) + self._logger.debug( + 'operation="Writing file", file="%s"', self.get_alert_py_path() + ) + write_file( + self.get_alert_py_name(), + self.get_alert_py_path(), + rendered_content, + self._logger, + ) def gen_helper_py_file(self): current_dir = op.dirname(op.abspath(__file__)) @@ -185,8 +207,9 @@ def gen_helper_py_file(self): template_path = self._template_helper_py if not op.isabs(self._template_helper_py): - template_path = op.join(self._temp_obj.get_template_dir(), - self._template_helper_py) + template_path = op.join( + self._temp_obj.get_template_dir(), self._template_helper_py + ) template = Template(filename=template_path, lookup=tmp_lookup) name = self._current_alert[ac.SHORT_NAME] @@ -203,35 +226,40 @@ def gen_helper_py_file(self): rendered_content = template.render( input=Munch.fromDict(self._all_setting), mod_alert=Munch.fromDict(self._current_alert), - global_settings=Munch.fromDict(settings) + global_settings=Munch.fromDict(settings), ) final_content = self.merge_py_code(init_content, rendered_content) - self._logger.debug('operation="Writing file", file="%s"', - self.get_alert_py_path()) - write_file(self.get_alert_helper_py_name(), - self.get_alert_helper_py_path(), - final_content, - self._logger) + self._logger.debug( + 'operation="Writing file", file="%s"', self.get_alert_py_path() + ) + write_file( + self.get_alert_helper_py_name(), + self.get_alert_helper_py_path(), + final_content, + self._logger, + ) if not self._output.get(name): self._output[name] = {} self._output[name][self.get_alert_py_name(helper="_helper")] = final_content def handle(self): for alert in self._alert_actions_setting: - self._logger.info('operation="Generate py file", alert_action="%s"', - 
alert[ac.SHORT_NAME]) + self._logger.info( + 'operation="Generate py file", alert_action="%s"', alert[ac.SHORT_NAME] + ) self.gen_py_file(alert) -def generate_alert_actions_py_files(input_setting=None, package_path=None, - logger=None, global_settings=None, - **kwargs): - py_gen = AlertActionsPyGenerator(input_setting=input_setting, - package_path=package_path, - logger=logger, - global_settings=global_settings, - **kwargs) +def generate_alert_actions_py_files( + input_setting=None, package_path=None, logger=None, global_settings=None, **kwargs +): + py_gen = AlertActionsPyGenerator( + input_setting=input_setting, + package_path=package_path, + logger=logger, + global_settings=global_settings, + **kwargs + ) py_gen.handle() return py_gen._output - diff --git a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_template.py b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_template.py index 6fed04f23..fc8065e91 100644 --- a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_template.py +++ b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_template.py @@ -23,10 +23,10 @@ class AlertActionsTemplateMgr: DEFAULT_HTML_LOOKUP_DIR = op.join(DEFAULT_TEMPLATE_DIR, "default_html_theme") def __init__(self, template_dir=None, html_theme=None): - self._template_dir = template_dir or \ - AlertActionsTemplateMgr.DEFAULT_TEMPLATE_DIR - self._html_theme = html_theme or \ - AlertActionsTemplateMgr.DEFAULT_HTML_LOOKUP_DIR + self._template_dir = ( + template_dir or AlertActionsTemplateMgr.DEFAULT_TEMPLATE_DIR + ) + self._html_theme = html_theme or AlertActionsTemplateMgr.DEFAULT_HTML_LOOKUP_DIR def get_template_dir(self): return self._template_dir diff --git a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/cim_actions.py b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/cim_actions.py index cd8ac84e3..257281cb7 100644 --- a/splunk_add_on_ucc_framework/modular_alert_builder/build_core/cim_actions.py +++ b/splunk_add_on_ucc_framework/modular_alert_builder/build_core/cim_actions.py @@ -26,47 +26,48 @@ class InvalidResultID(Exception): pass + class ModularAction: - def __init__(self, settings, logger, action_name='unknown'): - self.settings = json.loads(settings) - self.logger = logger - self.session_key = self.settings.get('session_key') - self.sid = self.settings.get('sid') + def __init__(self, settings, logger, action_name="unknown"): + self.settings = json.loads(settings) + self.logger = logger + self.session_key = self.settings.get("session_key") + self.sid = self.settings.get("sid") ## if sid contains rt_scheduler with snapshot-sid; drop snapshot-sid ## sometimes self.sid may be an integer (1465593470.1228) try: - rtsid = re.match(r'^(rt_scheduler.*)\.\d+$', self.sid) + rtsid = re.match(r"^(rt_scheduler.*)\.\d+$", self.sid) if rtsid: self.sid = rtsid.group(1) except: pass - self.orig_sid = '' - self.rid = '' - self.orig_rid = '' - self.results_file = self.settings.get('results_file') - self.search_name = self.settings.get('search_name') - self.app = self.settings.get('app') - self.user = self.settings.get('user') or self.settings.get('owner') - self.configuration = self.settings.get('configuration', {}) + self.orig_sid = "" + self.rid = "" + self.orig_rid = "" + self.results_file = self.settings.get("results_file") + self.search_name = self.settings.get("search_name") + self.app = self.settings.get("app") + self.user = self.settings.get("user") or self.settings.get("owner") + 
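## parameters configured for the alert action arrive under the 'configuration' key
+        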
self.configuration = self.settings.get("configuration", {}) ## enforce configuration is a 'dict' if not isinstance(self.configuration, dict): self.configuration = {} ## set loglevel to DEBUG if verbose - if normalizeBoolean(self.configuration.get('verbose', 'false')): + if normalizeBoolean(self.configuration.get("verbose", "false")): self.logger.setLevel(logging.DEBUG) self.logger.debug("loglevel set to DEBUG") ## use | sendalert param.action_name=$action_name$ - self.action_name = self.configuration.get('action_name') or action_name + self.action_name = self.configuration.get("action_name") or action_name ## use search_name to determine action_mode if self.search_name: - self.action_mode = 'saved' + self.action_mode = "saved" else: - self.action_mode = 'adhoc' + self.action_mode = "adhoc" - self.action_status = '' + self.action_status = "" ## Since we don't use the result object we get from settings it will be purged try: - del self.settings['result'] + del self.settings["result"] except Exception: pass @@ -76,15 +77,21 @@ def addjobinfo(self): self.job = {} if self.sid: try: - response, content = rest.simpleRequest('search/jobs/%s' % self.sid, sessionKey=self.session_key, getargs={'output_mode': 'json'}) + response, content = rest.simpleRequest( + "search/jobs/%s" % self.sid, + sessionKey=self.session_key, + getargs={"output_mode": "json"}, + ) if response.status == 200: - self.job = json.loads(content)['entry'][0]['content'] - self.logger.info(self.message('Successfully retrieved search job info')) + self.job = json.loads(content)["entry"][0]["content"] + self.logger.info( + self.message("Successfully retrieved search job info") + ) self.logger.debug(self.job) else: - self.logger.warn(self.message('Could not retrieve search job info')) + self.logger.warn(self.message("Could not retrieve search job info")) except Exception as e: - self.logger.warn(self.message('Could not retrieve search job info')) + self.logger.warn(self.message("Could not retrieve search job info")) ## The purpose of this method is to provide a common messaging interface def message(self, signature, status=None): @@ -105,23 +112,23 @@ def message(self, signature, status=None): for k, v in message_params.items(): # Do not include empty value params in the message. 
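Each kept pair renders as key="value";
             # rstrip() trims the trailing separator at return.
             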
if v != "": - message += f"{k}=\"{v}\" " + message += f'{k}="{v}" ' return message.rstrip() ## The purpose of this method is to update per-result ModAction attributes def update(self, result): ## This is for events/results that were created as the result of a previous action - self.orig_sid = result.get('orig_sid', '') + self.orig_sid = result.get("orig_sid", "") ## This is for events/results that were created as the result of a previous action - self.orig_rid = result.get('orig_rid', '') - if 'rid' in result: - self.rid = result['rid'] + self.orig_rid = result.get("orig_rid", "") + if "rid" in result: + self.rid = result["rid"] else: - raise InvalidResultID('Result must have an ID') + raise InvalidResultID("Result must have an ID") ## The purpose of this method is to generate per-result invocation messages def invoke(self): - self.logger.info(self.message('Invoking modular action')) + self.logger.info(self.message("Invoking modular action")) def dowork(self): return diff --git a/splunk_add_on_ucc_framework/normalize.py b/splunk_add_on_ucc_framework/normalize.py index 12b01c125..a3dc32c7b 100644 --- a/splunk_add_on_ucc_framework/normalize.py +++ b/splunk_add_on_ucc_framework/normalize.py @@ -28,12 +28,12 @@ "type": "format_type", "valueField": "value-field", "labelField": "label-field", - "name": "short_name" + "name": "short_name", } mapping_values = { "singleSelect": "dropdownlist", - "singleSelectSplunkSearch": "dropdownlist_splunk_search" + "singleSelectSplunkSearch": "dropdownlist_splunk_search", } @@ -54,20 +54,20 @@ def transform_params(parameter_list): label_field = param.pop("label-field") search = param.pop("search") param["ctrl_props"] = { - 'value-field': value_field, - 'label-field': label_field, - 'search': search + "value-field": value_field, + "label-field": label_field, + "search": search, } def iterdict(dictionary, result): - ''' + """ This function replaces key and value with the ones required by add-on alert builder - ''' + """ for key in dictionary: if key in mapping_keys: value = result.pop(key) - result[mapping_keys[key]] = value + result[mapping_keys[key]] = value mapped_key = mapping_keys[key] else: mapped_key = key @@ -75,7 +75,9 @@ def iterdict(dictionary, result): if isinstance(dictionary[key], dict): iterdict(dictionary[key], result[mapped_key]) elif isinstance(dictionary[key], list): - for dictionary_item, result_item in zip(dictionary[key], result[mapped_key]): + for dictionary_item, result_item in zip( + dictionary[key], result[mapped_key] + ): if isinstance(dictionary_item, dict): iterdict(dictionary_item, result_item) else: @@ -84,31 +86,27 @@ def iterdict(dictionary, result): def form_main_dict(alert, product_id, short_name): - ''' + """ Forms the final dictionary required by add-on alert generator - ''' + """ return { "product_id": product_id, "short_name": short_name, "global_settings": "", "html_setting": None, - "build_components": { - "conf": "True", - "py": "True", - "html": "True" - }, + "build_components": {"conf": "True", "py": "True", "html": "True"}, "schema.content": { "product_id": product_id, "short_name": short_name, - "modular_alerts": alert - } + "modular_alerts": alert, + }, } def normalize(schema_content, product_id, short_name): - ''' + """ Process the globalConfig alert schema to generate structure required by add-on alert generator - ''' + """ result = copy.deepcopy(schema_content) iterdict(schema_content, result) for alert in result["alerts"]: diff --git a/splunk_add_on_ucc_framework/start_alert_build.py 
b/splunk_add_on_ucc_framework/start_alert_build.py
index 1235abb6c..93aec803d 100644
--- a/splunk_add_on_ucc_framework/start_alert_build.py
+++ b/splunk_add_on_ucc_framework/start_alert_build.py
@@ -29,7 +29,7 @@ def __init__(self, prefix, logger):
         self.prefix = prefix

     def process(self, msg, kwargs):
-        return '[{}] {}'.format(self.prefix, msg), kwargs
+        return "[{}] {}".format(self.prefix, msg), kwargs


 def validate(alert, logger):
@@ -45,38 +45,61 @@ def validate(alert, logger):
                 if entity.get("type") in ["radio", "singleSelect"]:
                     if not entity.get("options"):
                         raise Exception(
-                            "{} type must have options parameter".format(entity.get("type")))
+                            "{} type must have options parameter".format(
+                                entity.get("type")
+                            )
+                        )
                 elif entity.get("options"):
                     raise Exception(
-                        "{} type must not contain options parameter".format(entity.get("type")))
+                        "{} type must not contain options parameter".format(
+                            entity.get("type")
+                        )
+                    )

                 if entity.get("type") in ["singleSelectSplunkSearch"]:
-                    if not all([entity.get("search"), entity.get("valueField"), entity.get("labelField")]):
-                        raise Exception("{} type must have search, valueLabel and valueField parameters".format(
-                            entity.get("type")))
-                    elif any([entity.get("search"), entity.get("valueField"), entity.get("labelField")]):
-                        raise Exception("{} type must not contain search, valueField or labelField parameter".format(
-                            entity.get("type")))
+                    if not all(
+                        [
+                            entity.get("search"),
+                            entity.get("valueField"),
+                            entity.get("labelField"),
+                        ]
+                    ):
+                        raise Exception(
+                            "{} type must have search, valueField and labelField parameters".format(
+                                entity.get("type")
+                            )
+                        )
+                    elif any(
+                        [
+                            entity.get("search"),
+                            entity.get("valueField"),
+                            entity.get("labelField"),
+                        ]
+                    ):
+                        raise Exception(
+                            "{} type must not contain search, valueField or labelField parameter".format(
+                                entity.get("type")
+                            )
+                        )
     except:
         logger.error(traceback.format_exc())
         raise


-def alert_build(schema_content, product_id, short_name, output_dir,sourcedir):
+def alert_build(schema_content, product_id, short_name, output_dir, sourcedir):
     # Initializing logger
     logging.basicConfig()
-    logger = logging.getLogger('Alert Logger')
-    logger = LoggerAdapter('ta="{}" Creating Alerts'.format(short_name),
-                           logger)
+    logger = logging.getLogger("Alert Logger")
+    logger = LoggerAdapter('ta="{}" Creating Alerts'.format(short_name), logger)

     # Validation
-    for alert in schema_content['alerts']:
+    for alert in schema_content["alerts"]:
         validate(alert, logger)

     # Get the alert schema with required structure
     envs = normalize.normalize(schema_content, product_id, short_name)

-    pack_folder = os.path.join(sourcedir, 'arf_dir_templates', 'modular_alert_package')
+    pack_folder = os.path.join(sourcedir, "arf_dir_templates", "modular_alert_package")

     # Generate Alerts
     generate_alerts(pack_folder, output_dir, logger, envs)
diff --git a/splunk_add_on_ucc_framework/uccrestbuilder/__init__.py b/splunk_add_on_ucc_framework/uccrestbuilder/__init__.py
index b3b85187f..b18220c9c 100644
--- a/splunk_add_on_ucc_framework/uccrestbuilder/__init__.py
+++ b/splunk_add_on_ucc_framework/uccrestbuilder/__init__.py
@@ -19,7 +19,6 @@
 """

-
 import collections

 from splunktaucclib.rest_handler.schema import RestSchema

@@ -36,7 +35,10 @@
 __version__ = "0.0.0"

-RestHandlerClass = collections.namedtuple("RestHandlerClass", ("module", "name"),)
+RestHandlerClass = collections.namedtuple(
+    "RestHandlerClass",
+    ("module", "name"),
+)


 def build(schema, handler, output_path, j2_env, post_process=None, *args, **kwargs):
@@ -64,7 +66,10 @@ def 
            "Invalid handler specified. "
            'It should be in form "module.sub_module.RestHandlerClass".'
        )
-        return RestHandlerClass(module=".".join(parts[:-1]), name=parts[-1],)
+        return RestHandlerClass(
+            module=".".join(parts[:-1]),
+            name=parts[-1],
+        )

    builder_obj = RestBuilder(schema, _parse_handler(handler), output_path)
    builder_obj.build()
diff --git a/splunk_add_on_ucc_framework/uccrestbuilder/builder.py b/splunk_add_on_ucc_framework/uccrestbuilder/builder.py
index b46e6c763..599b2e336 100644
--- a/splunk_add_on_ucc_framework/uccrestbuilder/builder.py
+++ b/splunk_add_on_ucc_framework/uccrestbuilder/builder.py
@@ -15,16 +15,12 @@
 #

-
 import os
 import os.path as op

 from .rest_conf import RestmapConf, WebConf

-__all__ = [
-    'RestBuilderError',
-    'RestBuilder'
-]
+__all__ = ["RestBuilderError", "RestBuilder"]


 class RestBuilderError(Exception):
@@ -33,9 +29,9 @@ class RestBuilderError(Exception):


 class _RestBuilderOutput:
-    readme = 'README'
-    default = 'default'
-    bin = 'bin'
+    readme = "README"
+    default = "default"
+    bin = "bin"

    def __init__(self, path, product):
        self._path = path
@@ -56,21 +52,13 @@ def put(self, subpath, file_name, content):

    def save(self):
        for full_name, contents in list(self._content.items()):
-            full_content = '\n\n'.join(contents)
-            with open(full_name, 'w') as f:
+            full_content = "\n\n".join(contents)
+            with open(full_name, "w") as f:
                f.writelines(full_content)


 class RestBuilder:
-
-    def __init__(
-        self,
-        schema,
-        handler,
-        output_path,
-        *args,
-        **kwargs
-    ):
+    def __init__(self, schema, handler, output_path, *args, **kwargs):
        """

        :param schema:
@@ -102,40 +90,38 @@ def build(self):
        for endpoint in self._schema.endpoints:
            # If the endpoint is oauth, which is used for getting the access token, conf file entries should not be created.
            if endpoint._name != "oauth":
-                if endpoint._name == 'settings':
+                if endpoint._name == "settings":
                    self.output.put(
                        self.output.default,
-                        endpoint.conf_name + '.conf',
+                        endpoint.conf_name + ".conf",
                        endpoint.generate_default_conf(),
                    )
                self.output.put(
                    self.output.readme,
-                    endpoint.conf_name + '.conf.spec',
+                    endpoint.conf_name + ".conf.spec",
                    endpoint.generate_spec(),
                )
                # Add data input of self defined conf to inputs.conf.spec
                if endpoint._entities[0] and endpoint._entities[0]._conf_name:
                    lines = [
-                        '[' + endpoint._name + '://]',
-                        'placeholder = placeholder'
+                        "[" + endpoint._name + "://]",
+                        "placeholder = placeholder",
                    ]
                    self.output.put(
-                        self.output.readme,
-                        'inputs.conf.spec',
-                        '\n'.join(lines)
+                        self.output.readme, "inputs.conf.spec", "\n".join(lines)
                    )
                self.output.put(
                    self.output.bin,
-                    endpoint.rh_name + '.py',
+                    endpoint.rh_name + ".py",
                    endpoint.generate_rh(self._handler),
                )

        self.output.put(
            self.output.default,
-            'restmap.conf',
+            "restmap.conf",
            RestmapConf.build(
                self._schema.endpoints,
                self._schema.namespace,
@@ -144,7 +130,7 @@ def build(self):
            )
        )
        self.output.put(
            self.output.default,
-            'web.conf',
+            "web.conf",
            WebConf.build(self._schema.endpoints),
        )
        self.output.save()
diff --git a/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/__init__.py b/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/__init__.py
index b9ea4650a..46020c32c 100644
--- a/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/__init__.py
+++ b/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/__init__.py
@@ -14,4 +14,4 @@
 # limitations under the License.
 #
 #
-# SPDX-License-Identifier: Apache-2.0
\ No newline at end of file
+# SPDX-License-Identifier: Apache-2.0
diff --git a/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/datainput.py b/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/datainput.py
index eb0c2e97b..cf9d5c28a 100644
--- a/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/datainput.py
+++ b/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/datainput.py
@@ -15,7 +15,6 @@
 #

-
 from .single_model import RestEndpointBuilder, RestEntityBuilder
diff --git a/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/field.py b/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/field.py
index 9029f83d6..2563217e9 100644
--- a/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/field.py
+++ b/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/field.py
@@ -15,14 +15,12 @@
 #

-
-
 from .base import indent, quote_string


 class RestFieldBuilder:
-    _kv_template = '{name} = {value}'
+    _kv_template = "{name} = {value}"
    _rh_template = """field.RestField(
    {name},
    required={required},
@@ -38,7 +36,7 @@ def __init__(self, field, validator):
    def generate_spec(self):
        return self._kv_template.format(
            name=self._field.name,
-            value='',
+            value="",
        )

    def _indent_validator(self):
diff --git a/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/multiple_model.py b/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/multiple_model.py
index 54fff9735..a20764ff7 100644
--- a/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/multiple_model.py
+++ b/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/multiple_model.py
@@ -15,7 +15,6 @@
 #

-
 from .base import indent
 from .single_model import RestEndpointBuilder, RestEntityBuilder
diff --git a/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/oauth_model.py b/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/oauth_model.py
index f519b2132..43dd794c1 100644
--- a/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/oauth_model.py
+++ b/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/oauth_model.py
@@ -15,7 +15,6 @@
 #

-
 from .base import RestEndpointBuilder

 """
diff --git a/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/single_model.py b/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/single_model.py
index 7ad82fa65..9a86580f1 100644
--- a/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/single_model.py
+++ b/splunk_add_on_ucc_framework/uccrestbuilder/endpoint/single_model.py
@@ -15,7 +15,6 @@
 #

-
 from .base import RestEndpointBuilder, RestEntityBuilder
diff --git a/splunk_add_on_ucc_framework/uccrestbuilder/global_config.py b/splunk_add_on_ucc_framework/uccrestbuilder/global_config.py
index 50466085f..33bfe205a 100644
--- a/splunk_add_on_ucc_framework/uccrestbuilder/global_config.py
+++ b/splunk_add_on_ucc_framework/uccrestbuilder/global_config.py
@@ -19,7 +19,6 @@
 """

-
 import json
 import os
 import os.path as op
@@ -39,6 +38,7 @@
    MultipleModelEndpointBuilder,
    MultipleModelEntityBuilder,
 )
+
 # model to get access token for oauth
 from .endpoint.oauth_model import OAuthModelEndpointBuilder
 from .endpoint.single_model import (
@@ -158,7 +158,8 @@ def _parse_field(self, content):
            default=content.get("defaultValue"),
        )
        return RestFieldBuilder(
-            field, self._parse_validation(content.get("validators")),
+            field,
+            self._parse_validation(content.get("validators")),
        )

    def _parse_validation(self, validation):
@@ -224,7 +225,8 @@ def build(self):
            arguments = arguments or {}
            self._validators.append(
                self._validation_template.format(
-                    validator=validator, arguments=self._arguments(**arguments),
+                    validator=validator,
+                    arguments=self._arguments(**arguments),
                )
            )

@@ -240,7 +242,10 @@ def _arguments(cls, **kwargs):
        if not kwargs:
            return ""
        args = list(
-            map(lambda k_v: "{}={}, ".format(k_v[0], k_v[1]), list(kwargs.items()),)
+            map(
+                lambda k_v: "{}={}, ".format(k_v[0], k_v[1]),
+                list(kwargs.items()),
+            )
        )
        args.insert(0, "")
        args.append("")
@@ -308,7 +313,9 @@ def url(cls, validation):
    def multiple_validators(cls, validators):
        validators_str = ", \n".join(validators)
        _template = """validator.AllOf(\n{validators}\n)"""
-        return _template.format(validators=indent(validators_str),)
+        return _template.format(
+            validators=indent(validators_str),
+        )


 GlobalConfigValidation.validation_mapping = {
@@ -359,8 +366,14 @@ def third_path(self):
        return self.schema.namespace

    def default_to_local(self):
-        default_dir = op.join(self.root_path, self.builder.output.default,)
-        local_dir = op.join(self.root_path, self.output_local,)
+        default_dir = op.join(
+            self.root_path,
+            self.builder.output.default,
+        )
+        local_dir = op.join(
+            self.root_path,
+            self.output_local,
+        )
        if not op.isdir(local_dir):
            os.makedirs(local_dir)
        for i in os.listdir(default_dir):
@@ -384,7 +397,9 @@ def import_declare_py_content(self):
            self.builder.output.bin,
            self.import_declare_py_name() + ".py",
        )
-        content = self._import_declare_content.format(ta_name=self.schema.product,)
+        content = self._import_declare_content.format(
+            ta_name=self.schema.product,
+        )
        with open(import_declare_file, "w") as f:
            f.write(content)
diff --git a/splunk_add_on_ucc_framework/uccrestbuilder/rest_conf.py b/splunk_add_on_ucc_framework/uccrestbuilder/rest_conf.py
index c9fc5c28f..8fcff908c 100644
--- a/splunk_add_on_ucc_framework/uccrestbuilder/rest_conf.py
+++ b/splunk_add_on_ucc_framework/uccrestbuilder/rest_conf.py
@@ -15,10 +15,6 @@
 #

-
-
-
-
 class RestmapConf:

    _admin_template = """
@@ -39,22 +35,22 @@ class RestmapConf:
    @classmethod
    def build(cls, endpoints, namespace, admin_match):
        if not endpoints:
-            return ''
+            return ""
        externals = [
            cls._admin_template.format(
                namespace=namespace,
                admin_match=admin_match,
-                endpoints=', '.join([ep.name for ep in endpoints])
+                endpoints=", ".join([ep.name for ep in endpoints]),
            )
        ]
        for endpoint in endpoints:
            external = cls._external_template.format(
                name=endpoint.name,
                rh_name=endpoint.rh_name,
-                actions=', '.join(endpoint.actions()),
+                actions=", ".join(endpoint.actions()),
            )
            externals.append(external)
-        return ''.join(externals)
+        return "".join(externals)

    @classmethod
    def admin_externals(cls, endpoints):
@@ -99,9 +95,6 @@ def build(cls, endpoints):
            )
        # add splunkd data endpoint
        stanzas.append(
-            cls._internal_template.format(
-                name='_splunkd_data',
-                endpoint='data/*'
-            )
+            cls._internal_template.format(name="_splunkd_data", endpoint="data/*")
        )
-        return ''.join(stanzas)
+        return "".join(stanzas)
diff --git a/tests/__init__.py b/tests/__init__.py
index b9ea4650a..46020c32c 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -14,4 +14,4 @@
 # limitations under the License.
 #
 #
-# SPDX-License-Identifier: Apache-2.0
\ No newline at end of file
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tests/conftest.py b/tests/conftest.py
index 80afddce8..26d627f6e 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -30,7 +30,6 @@ def pytest_configure(config):
    config.addinivalue_line("markers", "docker: Test search time only")


-
 @pytest.fixture(scope="session")
 def docker_compose_files(request):
    """
@@ -42,21 +41,26 @@ def docker_compose_files(request):
    docker_compose_path = os.path.join(
        str(request.config.invocation_dir), "docker-compose.yml"
    )
-    #LOGGER.info("docker-compose path: %s", docker_compose_path)
+    # LOGGER.info("docker-compose path: %s", docker_compose_path)
    return [docker_compose_path]

+
 @pytest.fixture(scope="session")
 def get_session_key(splunk, request):
-
-
    uri = f'https://{splunk["host"]}:{splunk["port"]}/services/auth/login'
    _rest_client = SplunkRestClient(
-        None, '-', 'nobody', "https", splunk["host"], splunk["port"])
+        None, "-", "nobody", "https", splunk["host"], splunk["port"]
+    )
    try:
        response = _rest_client.http.post(
-            uri, username=splunk["username"], password=splunk["password"],output_mode='json')
+            uri,
+            username=splunk["username"],
+            password=splunk["password"],
+            output_mode="json",
+        )
    except binding.HTTPError as e:
        raise
-    return uri, json.loads(response.body.read())['sessionKey']
\ No newline at end of file
+    return uri, json.loads(response.body.read())["sessionKey"]
diff --git a/tests/data/test_ucc_generate.py b/tests/data/test_ucc_generate.py
index 9afa46975..2366137e3 100644
--- a/tests/data/test_ucc_generate.py
+++ b/tests/data/test_ucc_generate.py
@@ -11,7 +11,9 @@ def test_ucc_generate(self):

    def test_ucc_generate_with_custom_output_folder(self):
        package_folder = path.join(path.dirname(path.realpath(__file__)), "package")
-        output_folder = path.join(path.dirname(path.realpath(__file__)), "custom_output")
+        output_folder = path.join(
+            path.dirname(path.realpath(__file__)), "custom_output"
+        )
        ucc.generate(source=package_folder, outputdir=output_folder)