diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index b8f372bc..091da29c 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -6,10 +6,10 @@ on:
 
 jobs:
 
-  centos-8:
-    name: CentOS 8
+  alma-8:
+    name: AlmaLinux 8
     runs-on: ubuntu-latest
-    container: centos:8
+    container: almalinux:8
     steps:
       - run: |
           yum -y install \
@@ -54,58 +54,58 @@ jobs:
       - name: Python 3 integration tests
         run: PYTHONPATH=. python3 ./tests/integration_tests.py
 
-  centos-6:
-    name: CentOS 6
+  fedora-34:
+    name: Fedora 34
     runs-on: ubuntu-latest
-    container: centos:6
+    container: fedora:34
     steps:
-      - run: yum -y install epel-release
       - run: |
           yum -y install \
-            python34-pytest \
-            python34-yaml
-      - uses: actions/checkout@v1
+            python3 \
+            python3-pytest \
+            python3-pyyaml
+      - uses: actions/checkout@v2
 
       - name: Python 3 unit tests
-        run: PYTHONPATH=. py.test-3
+        run: PYTHONPATH=. pytest-3
 
       - name: Python 3 integration tests
         run: PYTHONPATH=. python3 ./tests/integration_tests.py
 
-  fedora-31:
-    name: Fedora 31
+  fedora-35:
+    name: Fedora 35
     runs-on: ubuntu-latest
-    container: fedora:31
+    container: fedora:35
     steps:
       - run: |
           yum -y install \
-            python2-pytest \
-            python2-pyyaml \
             python3 \
             python3-pytest \
             python3-pyyaml
-      - uses: actions/checkout@v1
-
-      - name: Python 2 unit tests
-        run: PYTHONPATH=. pytest-2
-      - name: Python 2 integration tests
-        run: PYTHONPATH=. python2 ./tests/integration_tests.py
+      - uses: actions/checkout@v2
 
       - name: Python 3 unit tests
         run: PYTHONPATH=. pytest-3
 
       - name: Python 3 integration tests
         run: PYTHONPATH=. python3 ./tests/integration_tests.py
 
-  fedora-32:
-    name: Fedora 32
+  ubuntu-2004:
+    name: Ubuntu 20.04
     runs-on: ubuntu-latest
-    container: fedora:32
+    container: ubuntu:20.04
     steps:
+      - run: apt update
       - run: |
-          yum -y install \
-            python3 \
+          apt -y install \
+            python-pytest \
+            python-yaml \
             python3-pytest \
-            python3-pyyaml
-      - uses: actions/checkout@v2
+            python3-yaml
+      - uses: actions/checkout@v1
+
+      - name: Python 2 unit tests
+        run: PYTHONPATH=. pytest
+      - name: Python 2 integration tests
+        run: PYTHONPATH=. python2 ./tests/integration_tests.py
 
       - name: Python 3 unit tests
         run: PYTHONPATH=. pytest-3
@@ -213,10 +213,8 @@ jobs:
     runs-on: macos-latest
     steps:
       - run: brew install python
-      - run: pip install PyYAML
-      - run: pip install pytest
+      - run: pip3 install PyYAML
+      - run: pip3 install pytest
       - uses: actions/checkout@v1
       - run: PYTHONPATH=. pytest
-      - run: PYTHONPATH=. python ./tests/integration_tests.py
-
-
+      - run: PYTHONPATH=. python3 ./tests/integration_tests.py
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 0b74c709..00000000
--- a/.travis.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-language: python
-
-python:
-  - "2.7"
-  - "3.5"
-  - "3.6"
-
-# This is required to test on Python 3.7 at the moment.
-matrix:
-  include:
-    - python: 3.7
-      dist: xenial
-      sudo: true
-
-script:
-  - PYTHONPATH=. pytest
-  - PYTHONPATH=. python ./tests/integration_tests.py
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7fc6b285..f69c767d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,31 @@
 # Change Log
 
+## unreleased
+- Be consistent about warning about an old index. Previously the index
+  would be updated in some cases and not in others. Instead opt to never
+  auto-update: https://redmine.openinfosecfoundation.org/issues/3249
+- Better flowbit resolution logging in verbose mode:
+  https://redmine.openinfosecfoundation.org/issues/3205
+- Hide advanced command line options from help output:
+  https://redmine.openinfosecfoundation.org/issues/3974
+- Fix multiple modifications to a rule:
+  https://redmine.openinfosecfoundation.org/issues/4259
+- Allow spaces in custom HTTP headers. Redmine issue:
+  https://redmine.openinfosecfoundation.org/issues/4362
+- Fix "no-test" when set in configuration file:
+  https://redmine.openinfosecfoundation.org/issues/4493
+- Allow more custom characters in custom HTTP header to allow for more
+  of the base64 character set:
+  https://redmine.openinfosecfoundation.org/issues/4701
+- Send custom HTTP headers with check for remote checksum file:
+  https://redmine.openinfosecfoundation.org/issues/4001
+- Fix "check-versions" where the running Suricata is newer than what the index
+  knows about: https://redmine.openinfosecfoundation.org/issues/4373
+
+## 1.2.1 - 2021-02-23
+- Fix --no-merge. Redmine issue:
+  https://redmine.openinfosecfoundation.org/issues/4324
+
 ## 1.2.0 - 2020-10-05
 
 - Documentation updates.
diff --git a/doc/add-source.rst b/doc/add-source.rst
index e5b2a532..c49692c2 100644
--- a/doc/add-source.rst
+++ b/doc/add-source.rst
@@ -24,6 +24,16 @@ Options
    as a custom API key. Example::
 
       add-source --http-header "X-API-Key: 1234"
+
+   HTTP basic authentication can be achieved by setting the HTTP Basic
+   Authentication header with ``base64(user1:password1)``. Example::
+
+      add-source --http-header "Authorization: Basic dXNlcjE6cGFzc3dvcmQx"
+
+   HTTP Bearer authentication can be used by setting the HTTP Bearer
+   Authentication header with an OAuth2 token containing printable ASCII characters. Example::
+
+      add-source --http-header "Authorization: Bearer NjA2MTUOTAx?D+wOm4U/vpXQy0xhl!hSaR7#ENVpK59"
 
 .. option:: --no-checksum
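The base64 value in the Basic authentication example above can be reproduced with a couple of lines of Python (a minimal sketch; ``user1:password1`` is the placeholder credential pair from the documentation)::

    import base64

    # Encode "user:password" for an HTTP Basic Authorization header.
    token = base64.b64encode(b"user1:password1").decode()
    print("Authorization: Basic %s" % token)
    # Authorization: Basic dXNlcjE6cGFzc3dvcmQx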
diff --git a/doc/common-options.rst b/doc/common-options.rst
index bd08cf0e..d56df99d 100644
--- a/doc/common-options.rst
+++ b/doc/common-options.rst
@@ -44,3 +44,8 @@
 .. option:: --user-agent
 
    Set a custom user agent string for HTTP requests.
+
+.. option:: -s, --show-advanced
+
+   Show advanced options.
+
\ No newline at end of file
diff --git a/doc/quickstart.rst b/doc/quickstart.rst
index dabc1876..0f17a7b1 100644
--- a/doc/quickstart.rst
+++ b/doc/quickstart.rst
@@ -122,7 +122,8 @@ This command will:
 
 .. note:: Suricata-Update is also capable of triggering a rule reload,
           but doing so requires some extra configuration that will be
-          covered later.
+          covered later. See the documentation of
+          :command:`--reload-command=<command>` for more details.
 
 Configure Suricata to Load Suricata-Update Managed Rules
 ========================================================
diff --git a/doc/update.rst b/doc/update.rst
index 625f5a2b..19ab5257 100644
--- a/doc/update.rst
+++ b/doc/update.rst
@@ -30,11 +30,6 @@ Options
    be due to just recently downloaded, or the remote checksum matching
    the cached copy.
 
-.. option:: --merged=<filename>
-
-   Write a single file containing all rules. This can be used in
-   addition to ``--output`` or instead of ``--output``.
-
 .. option:: --no-merge
 
    Do not merge the rules into a single rule file.
@@ -158,12 +153,26 @@ Options
 .. option:: --reload-command=<command>
 
    A command to run after the rules have been updated; will not run if
    no change to the output files was made. For example::
 
-     --reload-command='sudo kill -USR2 $(cat /var/run/suricata.pid)'
+     --reload-command='sudo kill -USR2 $(pidof suricata)'
 
    will tell Suricata to reload its rules.
 
+   Furthermore, the reload can be triggered using Suricata's Unix socket.
+
+   Blocking reload (Suricata waits for the reload to finish)::
+
+     --reload-command='sudo suricatasc -c reload-rules'
+
+   Non-blocking reload (Suricata does not wait for the reload to finish)::
+
+     --reload-command='sudo suricatasc -c ruleset-reload-nonblocking'
+
+   See the Suricata documentation on `Rule Reloads
+   <https://suricata.readthedocs.io/en/latest/rule-management/rule-reload.html>`_
+   for more information.
+
 .. option:: --no-reload
 
    Disable Suricata rule reload.
diff --git a/suricata/update/commands/checkversions.py b/suricata/update/commands/checkversions.py
index 98a0f385..34923170 100644
--- a/suricata/update/commands/checkversions.py
+++ b/suricata/update/commands/checkversions.py
@@ -21,6 +21,26 @@
 logger = logging.getLogger()
 
+def is_gt(v1, v2):
+    if v1.full == v2.full:
+        return False
+
+    if v1.major < v2.major:
+        return False
+    elif v1.major > v2.major:
+        return True
+
+    if v1.minor < v2.minor:
+        return False
+    elif v1.minor > v2.minor:
+        return True
+
+    if v1.patch < v2.patch:
+        return False
+
+    return True
+
+
 def register(parser):
     parser.set_defaults(func=check_version)
 
@@ -42,7 +62,7 @@ def check_version(suricata_version):
         logger.error("Recommended version was not parsed properly")
         sys.exit(1)
     # In case index is out of date
-    if float(suricata_version.short) > float(recommended.short):
+    if is_gt(suricata_version, recommended):
         return
     # Evaluate if the installed version is present in index
     upgrade_version = version["suricata"].get(suricata_version.short)
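As a sanity check of the ``is_gt()`` semantics above, a small sketch with a stand-in version type (``Version`` here is a hypothetical namedtuple exposing just the fields ``is_gt()`` reads; the real object comes from suricata-update's version parsing)::

    from collections import namedtuple

    from suricata.update.commands.checkversions import is_gt

    Version = namedtuple("Version", ["full", "major", "minor", "patch"])

    old = Version("5.0.0", 5, 0, 0)
    new = Version("6.0.1", 6, 0, 1)

    assert is_gt(new, old)      # strictly newer
    assert not is_gt(old, new)  # strictly older
    assert not is_gt(old, old)  # equal versions are not "greater"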
diff --git a/suricata/update/commands/disablesource.py b/suricata/update/commands/disablesource.py
index 6a64a7bd..33a43e2a 100644
--- a/suricata/update/commands/disablesource.py
+++ b/suricata/update/commands/disablesource.py
@@ -36,5 +36,5 @@ def disable_source():
         logger.warning("Source %s is not enabled.", name)
         return 0
     logger.debug("Renaming %s to %s.disabled.", filename, filename)
-    os.rename(filename, "%s.disabled" % (filename))
+    shutil.move(filename, "%s.disabled" % (filename))
     logger.info("Source %s has been disabled", name)
diff --git a/suricata/update/commands/enablesource.py b/suricata/update/commands/enablesource.py
index 53bb68a3..e9396f24 100644
--- a/suricata/update/commands/enablesource.py
+++ b/suricata/update/commands/enablesource.py
@@ -53,7 +53,7 @@ def enable_source():
     disabled_source_filename = sources.get_disabled_source_filename(name)
     if os.path.exists(disabled_source_filename):
         logger.info("Re-enabling previously disabled source for %s.", name)
-        os.rename(disabled_source_filename, enabled_source_filename)
+        shutil.move(disabled_source_filename, enabled_source_filename)
         update_params = True
 
     if not os.path.exists(sources.get_index_filename()):
diff --git a/suricata/update/commands/listsources.py b/suricata/update/commands/listsources.py
index 103d77bd..d35c3cd2 100644
--- a/suricata/update/commands/listsources.py
+++ b/suricata/update/commands/listsources.py
@@ -21,7 +21,6 @@
 from suricata.update import config
 from suricata.update import sources
 from suricata.update import util
-from suricata.update.commands.updatesources import update_sources
 from suricata.update import exceptions
 
 logger = logging.getLogger()
@@ -72,11 +71,9 @@ def list_sources():
     free_only = config.args().free
 
     if not sources.source_index_exists(config):
-        logger.info("No source index found, running update-sources")
-        try:
-            update_sources()
-        except exceptions.ApplicationError as err:
-            logger.warning("%s: will use bundled index.", err)
+        logger.warning("Source index does not exist, will use bundled one.")
+        logger.warning("Please run suricata-update update-sources.")
+
     index = sources.load_source_index(config)
     for name, source in index.get_sources().items():
         is_not_free = source.get("subscribe-url")
diff --git a/suricata/update/configs/update.yaml b/suricata/update/configs/update.yaml
index e113f539..358e869d 100644
--- a/suricata/update/configs/update.yaml
+++ b/suricata/update/configs/update.yaml
@@ -36,6 +36,8 @@ ignore:
 
 # Provide a command to reload the Suricata rules.
 # May be overridden by the --reload-command command line option.
+# See the documentation of --reload-command for the different options
+# to reload Suricata rules.
 #reload-command: sudo systemctl reload suricata
 
 # Remote rule sources. Simply a list of URLs.
diff --git a/suricata/update/data/update.py b/suricata/update/data/update.py
index 1d05df4f..e57c1469 100644
--- a/suricata/update/data/update.py
+++ b/suricata/update/data/update.py
@@ -15,6 +15,7 @@
 # 02110-1301, USA.
 
 import os.path
+import sys
 
 try:
     from urllib2 import urlopen
@@ -23,11 +24,16 @@
 
 import yaml
 
+DEFAULT_URL = "https://raw.githubusercontent.com/oisf/suricata-intel-index/master/index.yaml"
+
 def embed_index():
     """Embed a copy of the index as a Python source file. We can't use a
     datafile yet as there is no easy way to do with distutils."""
+    if len(sys.argv) > 1:
+        url = sys.argv[1]
+    else:
+        url = DEFAULT_URL
     dist_filename = os.path.join(os.path.dirname(__file__), "index.py")
-    url = "https://raw.githubusercontent.com/oisf/suricata-intel-index/master/index.yaml"
     response = urlopen(url)
     index = yaml.safe_load(response.read())
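With the change to ``embed_index()`` above, the index URL can be overridden from the command line. A sketch of driving it against a local copy of the index (the ``file://`` path is hypothetical, and this writes ``index.py`` into ``suricata/update/data/``)::

    import sys

    from suricata.update.data.update import embed_index

    # Same effect as passing the URL as the script's first argument.
    sys.argv = ["update.py", "file:///tmp/index.yaml"]
    embed_index()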
diff --git a/suricata/update/main.py b/suricata/update/main.py
index 41cf0cb6..3aff24fb 100644
--- a/suricata/update/main.py
+++ b/suricata/update/main.py
@@ -105,13 +105,14 @@ def __init__(self):
         self.istty = os.isatty(sys.stdout.fileno())
 
     def check_checksum(self, tmp_filename, url):
         try:
-            checksum_url = url + ".md5"
+            checksum_url = url[0] + ".md5"
+            net_arg = (checksum_url, url[1])
             local_checksum = hashlib.md5(
                 open(tmp_filename, "rb").read()).hexdigest().strip()
             remote_checksum_buf = io.BytesIO()
             logger.info("Checking %s." % (checksum_url))
-            net.get(checksum_url, remote_checksum_buf)
+            net.get(net_arg, remote_checksum_buf)
             remote_checksum = remote_checksum_buf.getvalue().decode().strip()
             logger.debug("Local checksum=|%s|; remote checksum=|%s|" % (
                 local_checksum, remote_checksum))
@@ -174,7 +175,7 @@ def fetch(self, url):
                 url)
             return self.extract_files(tmp_filename)
         if checksum:
-            if self.check_checksum(tmp_filename, url):
+            if self.check_checksum(tmp_filename, net_arg):
                 logger.info("Remote checksum has not changed. "
                             "Not fetching.")
                 return self.extract_files(tmp_filename)
@@ -182,13 +183,14 @@ def fetch(self, url):
             os.makedirs(config.get_cache_dir(), mode=0o770)
         logger.info("Fetching %s." % (url))
         try:
-            tmp_fileobj = tempfile.NamedTemporaryFile()
+            tmp_fileobj = tempfile.NamedTemporaryFile(delete=False)
             net.get(
                 net_arg, tmp_fileobj, progress_hook=self.progress_hook)
-            shutil.copyfile(tmp_fileobj.name, tmp_filename)
             tmp_fileobj.close()
+            shutil.copyfile(tmp_fileobj.name, tmp_filename)
+            os.unlink(tmp_fileobj.name)
         except URLError as err:
             if os.path.exists(tmp_filename):
                 logger.warning(
@@ -496,7 +498,7 @@ def write_merged(filename, rulemap, dep_files):
             else:
                 handle_filehash_files(rule, dep_files, kw)
             print(rule.format(), file=fileobj)
-    os.rename(tmp_filename, filename)
+    shutil.move(tmp_filename, filename)
 
 def write_to_directory(directory, files, rulemap, dep_files):
     # List of rule IDs that have been added.
@@ -508,9 +510,9 @@
     oldset = {}
 
     if not args.quiet:
-        for filename in files:
+        for file in files:
             outpath = os.path.join(
-                directory, os.path.basename(filename))
+                directory, os.path.basename(file.filename))
             if os.path.exists(outpath):
                 for rule in rule_mod.parse_file(outpath):
@@ -533,15 +535,15 @@
                 len(removed), len(modified)))
 
-    for filename in sorted(files):
+    for file in sorted(files):
         outpath = os.path.join(
-            directory, os.path.basename(filename))
+            directory, os.path.basename(file.filename))
         logger.debug("Writing %s." % outpath)
-        if not filename.endswith(".rules"):
-            open(outpath, "wb").write(files[filename])
+        if not file.filename.endswith(".rules"):
+            open(outpath, "wb").write(file.content)
         else:
             content = []
-            for line in io.StringIO(files[filename].decode("utf-8")):
+            for line in io.StringIO(file.content.decode("utf-8")):
                 rule = rule_mod.parse(line)
                 if not rule:
                     content.append(line.strip())
@@ -552,11 +554,17 @@
                         handle_dataset_files(rule, dep_files)
                     else:
                         handle_filehash_files(rule, dep_files, kw)
-                    content.append(rulemap[rule.id].format())
+                    if rule.id in rulemap:
+                        content.append(rulemap[rule.id].format())
+                    else:
+                        # Just pass the input through. Most likely a
+                        # rule from a file that was ignored, but we'll
+                        # still pass it through.
+                        content.append(line.strip())
             tmp_filename = ".".join([outpath, "tmp"])
             io.open(tmp_filename, encoding="utf-8", mode="w").write(
                 u"\n".join(content))
-            os.rename(tmp_filename, outpath)
+            shutil.move(tmp_filename, outpath)
 
 def write_yaml_fragment(filename, files):
     logger.info(
@@ -619,13 +627,15 @@ def dump_sample_configs():
 def resolve_flowbits(rulemap, disabled_rules):
     flowbit_resolver = rule_mod.FlowbitResolver()
     flowbit_enabled = set()
+    pass_ = 1
     while True:
+        logger.debug("Checking flowbits for pass %d of rules.", pass_)
         flowbits = flowbit_resolver.get_required_flowbits(rulemap)
         logger.debug("Found %d required flowbits.", len(flowbits))
         required_rules = flowbit_resolver.get_required_rules(rulemap, flowbits)
         logger.debug(
-            "Found %d rules to enable to for flowbit requirements",
-            len(required_rules))
+            "Found %d rules to enable for flowbit requirements (pass %d)",
+            len(required_rules), pass_)
         if not required_rules:
             logger.debug("All required rules enabled.")
             break
@@ -637,6 +647,7 @@
             rule.enabled = True
             rule.noalert = True
             flowbit_enabled.add(rule)
+        pass_ = pass_ + 1
     logger.info("Enabled %d rules for flowbit dependencies." % (
         len(flowbit_enabled)))
@@ -1171,6 +1182,12 @@ def _main():
 
     for key, rule in rulemap.items():
 
+        # To avoid duplicate counts when a rule has more than one modification
+        # to it, we track the actions here then update the counts at the end.
+        enabled = False
+        modified = False
+        dropped = False
+
         for matcher in disable_matchers:
             if rule.enabled and matcher.match(rule):
                 logger.debug("Disabling: %s" % (rule.brief()))
@@ -1181,19 +1198,26 @@ def _main():
             if not rule.enabled and matcher.match(rule):
                 logger.debug("Enabling: %s" % (rule.brief()))
                 rule.enabled = True
-                enable_count += 1
+                enabled = True
 
         for fltr in drop_filters:
             if fltr.match(rule):
-                rulemap[rule.id] = fltr.run(rule)
-                drop_count += 1
+                rule = fltr.run(rule)
+                dropped = True
 
         for fltr in modify_filters:
             if fltr.match(rule):
-                new_rule = fltr.run(rule)
-                if new_rule:
-                    rulemap[rule.id] = new_rule
-                    modify_count += 1
+                rule = fltr.run(rule)
+                modified = True
+
+        if enabled:
+            enable_count += 1
+        if modified:
+            modify_count += 1
+        if dropped:
+            drop_count += 1
+
+        rulemap[key] = rule
 
     # Check if we should disable ja3 rules.
     try:
@@ -1234,10 +1258,10 @@ def _main():
         file_tracker.add(output_filename)
         write_merged(os.path.join(output_filename), rulemap, dep_files)
     else:
-        for filename in files:
+        for file in files:
             file_tracker.add(
                 os.path.join(
-                    config.get_output_dir(), os.path.basename(filename)))
+                    config.get_output_dir(), os.path.basename(file.filename)))
         write_to_directory(config.get_output_dir(), files, rulemap, dep_files)
 
     manage_classification(suriconf, classification_files)
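The checksum handling in ``check_checksum()`` above only skips a download when the remote ``.md5`` digest matches the local cached file. The comparison itself reduces to the following (a sketch; the filename and digest are stand-ins)::

    import hashlib

    def checksum_matches(local_filename, remote_md5_hex):
        # MD5 the local file and compare against the remote digest string.
        with open(local_filename, "rb") as fileobj:
            local_checksum = hashlib.md5(fileobj.read()).hexdigest().strip()
        return local_checksum == remote_md5_hex.strip()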
diff --git a/suricata/update/net.py b/suricata/update/net.py
index 49a58cf5..eac060e9 100644
--- a/suricata/update/net.py
+++ b/suricata/update/net.py
@@ -91,14 +91,14 @@ def is_header_clean(header):
     if len(header) != 2:
         return False
     name, val = header[0].strip(), header[1].strip()
-    if re.match( r"^[\w-]+$", name) and re.match(r"^[\w-]+$", val):
+    if re.match( r"^[\w-]+$", name) and re.match(r"^[\w\s -~]+$", val):
         return True
     return False
 
 def get(url, fileobj, progress_hook=None):
     """ Perform a GET request against a URL writing the contents into
-        the provideded file like object.
+        the provided file-like object.
 
     :param url: The URL to fetch
     :param fileobj: The fileobj to write the content to
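The relaxed value pattern in ``is_header_clean()`` now admits any printable ASCII, which is what lets base64 padding and the Bearer token characters through (a quick check of the new pattern against sample values)::

    import re

    VALUE_RE = r"^[\w\s -~]+$"  # same value pattern as above

    assert re.match(VALUE_RE, "Basic dXNlcjE6cGFzc3dvcmQx")
    assert re.match(VALUE_RE, "Bearer NjA2MTUOTAx?D+wOm4U/vpXQy0xhl!hSaR7#ENVpK59")
    assert not re.match(VALUE_RE, "control\x01characters")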
diff --git a/suricata/update/parsers.py b/suricata/update/parsers.py
index 6d86e455..adda46b2 100644
--- a/suricata/update/parsers.py
+++ b/suricata/update/parsers.py
@@ -31,6 +31,11 @@
 
 default_update_yaml = config.DEFAULT_UPDATE_YAML_PATH
 
+show_advanced = False
+
+if "-s" in sys.argv or "--show-advanced" in sys.argv:
+    show_advanced = True
+
 # Global arguments - command line options for suricata-update
 global_arg = [
     (("-v", "--verbose"),
@@ -56,13 +61,18 @@
      'help': "Override Suricata version"}),
     (("--user-agent",),
      {'metavar': '',
-      'help': "Set custom user-agent string"}),
+      'help': "Set custom user-agent string"
+      if show_advanced else argparse.SUPPRESS}),
     (("--no-check-certificate",),
      {'action': 'store_true', 'default': None,
-      'help': "Disable server SSL/TLS certificate verification"}),
+      'help': "Disable server SSL/TLS certificate verification"
+      if show_advanced else argparse.SUPPRESS}),
     (("-V", "--version"),
      {'action': 'store_true', 'default': False,
-      'help': "Display version"})
+      'help': "Display version"}),
+    (("-s", "--show-advanced"),
+     {'action': 'store_true',
+      'help': "Show advanced options"}),
 ]
 
 # Update arguments - command line options for suricata-update
@@ -74,22 +84,27 @@
     {'action': 'store_true', 'default': False,
      'help': "Force operations that might otherwise be skipped"}),
    (("--yaml-fragment",),
-    {'metavar': '',
-     'help': "Output YAML fragment for rule inclusion"}),
+    {'metavar': '',
+     'help': "Output YAML fragment for rule inclusion"
+     if show_advanced else argparse.SUPPRESS}),
    (("--url",),
     {'metavar': '', 'action': 'append', 'default': [],
      'help': "URL to use instead of auto-generating one "
-     "(can be specified multiple times)"}),
+     "(can be specified multiple times)"
+     if show_advanced else argparse.SUPPRESS}),
    (("--local",),
     {'metavar': '', 'action': 'append', 'default': [],
      'help': "Local rule files or directories "
-     "(can be specified multiple times)"}),
+     "(can be specified multiple times)"
+     if show_advanced else argparse.SUPPRESS}),
    (("--sid-msg-map",),
-    {'metavar': '',
-     'help': "Generate a sid-msg.map file"}),
+    {'metavar': '',
+     'help': "Generate a sid-msg.map file"
+     if show_advanced else argparse.SUPPRESS}),
    (("--sid-msg-map-2",),
     {'metavar': '',
-     'help': "Generate a v2 sid-msg.map file"}),
+     'help': "Generate a v2 sid-msg.map file"
+     if show_advanced else argparse.SUPPRESS}),
    (("--disable-conf",),
     {'metavar': '',
@@ -107,37 +122,46 @@
    (("--ignore",),
     {'metavar': '', 'action': 'append', 'default': None,
      'help': "Filenames to ignore "
-     "(can be specified multiple times; default: *deleted.rules)"}),
+     "(can be specified multiple times; default: *deleted.rules)"
+     if show_advanced else argparse.SUPPRESS}),
    (("--no-ignore",),
-    {'action': 'store_true', 'default': False,
-     'help': "Disables the ignore option."}),
+    {'action': 'store_true', 'default': False,
+     'help': "Disables the ignore option."
+     if show_advanced else argparse.SUPPRESS}),
    (("--threshold-in",),
     {'metavar': '',
-     'help': "Filename of rule thresholding configuration"}),
+     'help': "Filename of rule thresholding configuration"
+     if show_advanced else argparse.SUPPRESS}),
    (("--threshold-out",),
     {'metavar': '',
-     'help': "Output of processed threshold configuration"}),
+     'help': "Output of processed threshold configuration"
+     if show_advanced else argparse.SUPPRESS}),
    (("--dump-sample-configs",),
     {'action': 'store_true', 'default': False,
-     'help': "Dump sample config files to current directory"}),
+     'help': "Dump sample config files to current directory"
+     if show_advanced else argparse.SUPPRESS}),
    (("--etopen",),
     {'action': 'store_true',
-     'help': "Use ET-Open rules (default)"}),
+     'help': "Use ET-Open rules (default)"
+     if show_advanced else argparse.SUPPRESS}),
    (("--reload-command",),
     {'metavar': '',
-     'help': "Command to run after update if modified"}),
+     'help': "Command to run after update if modified"
+     if show_advanced else argparse.SUPPRESS}),
    (("--no-reload",),
     {'action': 'store_true', 'default': False,
     'help': "Disable reload"}),
    (("-T", "--test-command"),
     {'metavar': '',
-     'help': "Command to test Suricata configuration"}),
+     'help': "Command to test Suricata configuration"
+     if show_advanced else argparse.SUPPRESS}),
    (("--no-test",),
-    {'action': 'store_true', 'default': False,
+    {'action': 'store_true', 'default': None,
     'help': "Disable testing rules with Suricata"}),
    (("--no-merge",),
     {'action': 'store_true', 'default': False,
-     'help': "Do not merge the rules into a single file"}),
+     'help': "Do not merge the rules into a single file"
+     if show_advanced else argparse.SUPPRESS}),
    (("--offline",),
     {'action': 'store_true',
     'help': "Run offline using most recent cached rules"}),
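The hiding mechanism above leans on argparse: passing ``argparse.SUPPRESS`` as the help text keeps an option functional while omitting it from ``--help``. A minimal standalone sketch of the pattern::

    import argparse

    show_advanced = False  # toggled by -s/--show-advanced in parsers.py

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--user-agent",
        help="Set custom user-agent string"
        if show_advanced else argparse.SUPPRESS)

    # The option still parses; it just doesn't appear in --help output.
    args = parser.parse_args(["--user-agent", "my-agent/1.0"])
    assert args.user_agent == "my-agent/1.0"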
diff --git a/suricata/update/version.py b/suricata/update/version.py
index 01c4c83a..1a4c44af 100644
--- a/suricata/update/version.py
+++ b/suricata/update/version.py
@@ -4,4 +4,4 @@
 # Alpha: 1.0.0a1
 # Development: 1.0.0dev0
 # Release candidate: 1.0.0rc1
-version = "1.2.0"
+version = "1.3.0dev0"
diff --git a/tests/integration_tests.py b/tests/integration_tests.py
index 77057533..89705853 100755
--- a/tests/integration_tests.py
+++ b/tests/integration_tests.py
@@ -2,25 +2,29 @@
 import os
 import subprocess
 import shutil
+import tempfile
+import suricata.update.rule
 
 DATA_DIR = "./tests/tmp"
 
+
 def run(args):
     subprocess.check_call(args)
 
+
 def delete(path):
     if os.path.isdir(path):
         shutil.rmtree(path)
     else:
         os.unlink(path)
 
+
 print("Python executable: %s" % sys.executable)
 print("Python version: %s" % str(sys.version))
 print("Current directory: %s" % os.getcwd())
 
 # Override the default source index URL to avoid hitting the network.
-os.environ["SOURCE_INDEX_URL"] = "file://%s/tests/index.yaml" % (
-    os.getcwd())
+os.environ["SOURCE_INDEX_URL"] = "file://%s/tests/index.yaml" % (os.getcwd())
 os.environ["ETOPEN_URL"] = "file://%s/tests/emerging.rules.tar.gz" % (
     os.getcwd())
 
@@ -31,56 +35,187 @@ def delete(path):
 common_args = [
     sys.executable,
     "./bin/suricata-update",
-    "-D", DATA_DIR,
-    "-c", "./tests/empty",
+    "-D",
+    DATA_DIR,
+    "-c",
+    "./tests/empty",
 ]
 
 common_update_args = [
     "--no-test",
     "--no-reload",
-    "--suricata-conf", "./tests/suricata.yaml",
-    "--disable-conf", "./tests/disable.conf",
-    "--enable-conf", "./tests/empty",
-    "--drop-conf", "./tests/empty",
-    "--modify-conf", "./tests/empty",
+    "--suricata-conf",
+    "./tests/suricata.yaml",
+    "--disable-conf",
+    "./tests/disable.conf",
+    "--enable-conf",
+    "./tests/empty",
+    "--drop-conf",
+    "./tests/empty",
+    "--modify-conf",
+    "./tests/empty",
 ]
 
 # Default run with data directory.
 run(common_args + common_update_args)
-assert(os.path.exists(DATA_DIR))
-assert(os.path.exists(os.path.join(DATA_DIR, "update", "cache")))
-assert(os.path.exists(os.path.join(DATA_DIR, "rules", "suricata.rules")))
+assert (os.path.exists(DATA_DIR))
+assert (os.path.exists(os.path.join(DATA_DIR, "update", "cache")))
+assert (os.path.exists(os.path.join(DATA_DIR, "rules", "suricata.rules")))
+
+# Default run with data directory and --no-merge
+run(common_args + common_update_args + ["--no-merge"])
+assert (os.path.exists(DATA_DIR))
+assert (os.path.exists(os.path.join(DATA_DIR, "update", "cache")))
+assert (os.path.exists(
+    os.path.join(DATA_DIR, "rules", "emerging-deleted.rules")))
+assert (os.path.exists(
+    os.path.join(DATA_DIR, "rules", "emerging-current_events.rules")))
 
 # Still a default run, but set --output to an alternate location.
 run(common_args + common_update_args + ["--output", "./tests/tmp/_rules"])
-assert(os.path.exists(os.path.join(DATA_DIR, "_rules")))
+assert (os.path.exists(os.path.join(DATA_DIR, "_rules")))
 
 # Update sources.
 run(common_args + ["update-sources"])
-assert(os.path.exists(os.path.join(DATA_DIR, "update", "cache", "index.yaml")))
+assert (os.path.exists(os.path.join(DATA_DIR, "update", "cache",
+                                    "index.yaml")))
 
-# Now delete the index and run list-sources to see if it downloads
-# the index.
+# Now delete the index and run list-sources to confirm that it no longer
+# downloads the index.
 delete(os.path.join(DATA_DIR, "update", "cache", "index.yaml"))
 run(common_args + ["list-sources"])
-assert(os.path.exists(os.path.join(DATA_DIR, "update", "cache", "index.yaml")))
+assert(not os.path.exists(os.path.join(DATA_DIR, "update", "cache", "index.yaml")))
 
 # Enable a source.
 run(common_args + ["enable-source", "oisf/trafficid"])
-assert(os.path.exists(
+assert (os.path.exists(
     os.path.join(DATA_DIR, "update", "sources", "oisf-trafficid.yaml")))
 
 # Disable the source.
 run(common_args + ["disable-source", "oisf/trafficid"])
-assert(not os.path.exists(
-    os.path.join(
-        DATA_DIR, "update", "sources", "oisf-trafficid.yaml")))
-assert(os.path.exists(
-    os.path.join(
-        DATA_DIR, "update", "sources", "oisf-trafficid.yaml.disabled")))
+assert (not os.path.exists(
+    os.path.join(DATA_DIR, "update", "sources", "oisf-trafficid.yaml")))
+assert (os.path.exists(
+    os.path.join(DATA_DIR, "update", "sources",
+                 "oisf-trafficid.yaml.disabled")))
 
 # Remove the source.
 run(common_args + ["remove-source", "oisf/trafficid"])
-assert(not os.path.exists(
-    os.path.join(
-        DATA_DIR, "update", "sources", "oisf-trafficid.yaml.disabled")))
+assert (not os.path.exists(
+    os.path.join(DATA_DIR, "update", "sources",
+                 "oisf-trafficid.yaml.disabled")))
+
+# Add a source with a custom header.
+run(common_args + [
+    "add-source", "--http-header", "Header: NoSpaces",
+    "testing-header-nospaces", "file:///doesnotexist"
+])
+
+# Add a source with a custom header with spaces in the value
+# (https://redmine.openinfosecfoundation.org/issues/4362)
+run(common_args + [
+    "add-source", "--http-header", "Authorization: Basic dXNlcjE6cGFzc3dvcmQx",
+    "testing-header-with-spaces", "file:///doesnotexist"
+])
+
+
+class IntegrationTest:
+    def __init__(self, configs={}):
+        self.directory = tempfile.mkdtemp(dir=DATA_DIR)
+        self.configs = configs
+        self.args = []
+        self.write_configs()
+
+        if not "update.yaml" in self.configs:
+            self.args += ["-c", "./tests/empty"]
+
+    def write_configs(self):
+        for config in self.configs:
+            config_filename = "%s/%s" % (self.directory, config)
+            with open(config_filename, "w") as of:
+                of.write(self.configs[config])
+            if config == "modify.conf":
+                self.args += ["--modify-conf", config_filename]
+            elif config == "drop.conf":
+                self.args += ["--drop-conf", config_filename]
+            elif config == "enable.conf":
+                self.args += ["--enable-conf", config_filename]
+            elif config == "disable.conf":
+                self.args += ["--disable-conf", config_filename]
+
+    def run(self):
+        args = [
+            sys.executable,
+            "./bin/suricata-update",
+            "-D",
+            self.directory,
+            "--no-test",
+            "--no-reload",
+            "--suricata-conf",
+            "./tests/suricata.yaml",
+        ] + self.args
+        subprocess.check_call(args)
+        self.check()
+        self.clean()
+
+    def clean(self):
+        if self.directory.startswith(DATA_DIR):
+            shutil.rmtree(self.directory)
+
+    def check(self):
+        pass
+
+    def get_rule_by_sid(self, sid):
+        """Return the rule with the given sid, or None if not found."""
+        with open("%s/rules/suricata.rules" % (self.directory)) as inf:
+            for line in inf:
+                rule = suricata.update.rule.parse(line)
+                if rule.sid == sid:
+                    return rule
+        return None
+
+
+class MultipleModifyTest(IntegrationTest):
+
+    configs = {
+        "modify.conf":
+        """
+modifysid emerging-exploit.rules "^alert" | "drop"
+modifysid * "^drop(.*)noalert(.*)" | "alert${1}noalert${2}"
+    """
+    }
+
+    def __init__(self):
+        IntegrationTest.__init__(self, self.configs)
+
+    def check(self):
+        # This rule should have been converted to drop.
+        rule1 = self.get_rule_by_sid(2103461)
+        assert(rule1.action == "drop")
+
+        # This one should have been converted back to alert.
+        rule2 = self.get_rule_by_sid(2023184)
+        assert(rule2.action == "alert")
+
+
+class DropAndModifyTest(IntegrationTest):
+
+    configs = {
+        "drop.conf": """
+2024029
+    """,
+        "modify.conf": """
+2024029 "ET INFO" "TEST INFO"
+    """
+    }
+
+    def __init__(self):
+        IntegrationTest.__init__(self, self.configs)
+
+    def check(self):
+        rule1 = self.get_rule_by_sid(2024029)
+        assert(rule1.action == "drop")
+        assert(rule1.msg.startswith("TEST INFO"))
+
+
+MultipleModifyTest().run()
+DropAndModifyTest().run()
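The ``IntegrationTest`` harness above is driven entirely by the ``configs`` dict, so a new case only needs a config body and a ``check()`` method. A sketch of an additional case (hypothetical; sid ``2019401`` is a stand-in and is not known to be in the bundled test rules)::

    class DisableSidTest(IntegrationTest):

        configs = {
            "disable.conf": "2019401\n",
        }

        def __init__(self):
            IntegrationTest.__init__(self, self.configs)

        def check(self):
            # A disabled rule should be missing or commented out.
            rule = self.get_rule_by_sid(2019401)
            assert rule is None or not rule.enabled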
diff --git a/tests/test_main.py b/tests/test_main.py
index 1425cd19..86fa486a 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -62,7 +62,12 @@ def test_check_checksum(self):
             os.path.dirname(os.path.realpath(__file__)))
         local_file = "%s/emerging.rules.tar.gz" % (
             os.path.dirname(os.path.realpath(__file__)))
-        r = fetch.check_checksum(local_file, url)
+
+        # The URL passed to check_checksum is actually a tuple:
+        # (url, custom-header, has checksum url)
+        net_arg = (url, None, True)
+
+        r = fetch.check_checksum(local_file, net_arg)
         self.assertTrue(r)
 
 class ThresholdProcessorTestCase(unittest.TestCase):
diff --git a/tox-integration.ini b/tox-integration.ini
index 8926b1f4..dfa991f3 100644
--- a/tox-integration.ini
+++ b/tox-integration.ini
@@ -4,7 +4,7 @@
 # and then run "tox" from this directory.
 
 [tox]
-envlist = py27, py35, py36, py37
+envlist = py27, py36, py37, py38
 
 [testenv]
 commands = python ./tests/integration_tests.py
diff --git a/tox.ini b/tox.ini
index e599a77a..5ce1245d 100644
--- a/tox.ini
+++ b/tox.ini
@@ -4,7 +4,7 @@
 # and then run "tox" from this directory.
 
 [tox]
-envlist = py27, py35, py36, py37
+envlist = py27, py36, py37, py38
 
 [testenv]
 commands = pytest
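For reference, the rule objects that both test suites inspect come from ``suricata.update.rule.parse``, which exposes the ``sid``, ``action``, and ``msg`` fields asserted on above (a sketch; the rule text is a made-up example, not an ET Open rule)::

    import suricata.update.rule as rule_mod

    line = ('alert http any any -> any any '
            '(msg:"TEST INFO example"; sid:2024029; rev:1;)')
    rule = rule_mod.parse(line)
    print(rule.sid, rule.action, rule.msg)
    # 2024029 alert TEST INFO example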