crawler tests: Add --max-urls argument to limit test time
To limit the number of URLs checked in the crawler tests,
one may now pass --max-urls=N to pytest.

Change-Id: I193336edc5031b3ef9aa174610bf673b28b9f2e1
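
Example invocation (the test directory tests/gui_crawl is taken from the diff below; exact paths may differ in your checkout):

    # check at most 100 URLs, then stop
    pytest tests/gui_crawl --max-urls=100

    # the same limit via the pre-existing environment variable
    GUI_CRAWLER_URL_LIMIT=100 pytest tests/gui_crawl
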
rene-slowenski-checkmk committed Sep 3, 2024
1 parent f5240c3 commit 923a367
Showing 2 changed files with 13 additions and 4 deletions.
13 changes: 12 additions & 1 deletion tests/gui_crawl/conftest.py
@@ -15,18 +15,29 @@
 logger = logging.getLogger()
 
 
+def pytest_addoption(parser):
+    parser.addoption(
+        "--max-urls",
+        action="store",
+        default=int(os.environ.get("GUI_CRAWLER_URL_LIMIT", "0")),
+        type=int,
+        help="Select only N URLs for the crawler tests (0=all).",
+    )
+
+
 @pytest.fixture(name="test_site", scope="session")
 def get_site() -> Generator[Site, None, None]:
     yield from get_site_factory(prefix="crawl_").get_test_site()
 
 
 @pytest.fixture(name="test_crawler", scope="session")
-def crawler(test_site: Site) -> Generator[Crawler, None, None]:
+def crawler(test_site: Site, pytestconfig: pytest.Config) -> Generator[Crawler, None, None]:
     xss_crawl = os.environ.get("XSS_CRAWL", "0") == "1"
     crawler_type = XssCrawler if xss_crawl else Crawler
     test_crawler = crawler_type(
         test_site,
         report_file=os.environ.get("CRAWL_REPORT"),
+        max_urls=pytestconfig.getoption(name="--max-urls"),
     )
     try:
         yield test_crawler
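For context, the wiring above relies on a standard pytest mechanism: an option registered in pytest_addoption becomes readable from any fixture through the built-in pytestconfig fixture. A minimal self-contained sketch (the fixture name url_limit is illustrative, not from the commit):

    # conftest.py -- minimal sketch of the option/fixture wiring
    import pytest

    def pytest_addoption(parser: pytest.Parser) -> None:
        # Registered once at session start; 0 conventionally means "no limit".
        parser.addoption("--max-urls", action="store", default=0, type=int)

    @pytest.fixture(scope="session")
    def url_limit(pytestconfig: pytest.Config) -> int:
        # pytestconfig exposes parsed command-line options to any fixture.
        return pytestconfig.getoption(name="--max-urls")
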
4 changes: 1 addition & 3 deletions tests/testlib/crawler.py
@@ -179,10 +179,8 @@ def __init__(self, test_site: Site, report_file: str | None, max_urls: int = 0)
             "text/x-sh",
         }
 
-        # override value using environment-variable
-        maxlen = int(os.environ.get("GUI_CRAWLER_URL_LIMIT", "0")) or max_urls
         # limit minimum value to 0.
-        self._max_urls = max(0, maxlen)
+        self._max_urls = max(0, max_urls)
         self._todos = deque([Url(self.site.internal_url)])
 
     async def crawl(self, max_tasks: int) -> None:
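The crawl loop itself is untouched by this commit; only the source of the limit changes (from the environment variable to the constructor argument). For illustration, a hypothetical sketch of how a cap like self._max_urls is typically enforced (assumed logic, not the actual Crawler implementation):

    # Hypothetical sketch -- not the code in tests/testlib/crawler.py.
    from collections import deque

    def crawl(start_url: str, max_urls: int = 0) -> list[str]:
        todos = deque([start_url])
        visited: list[str] = []
        while todos:
            # max_urls == 0 means "no limit", mirroring the option's default.
            if max_urls and len(visited) >= max_urls:
                break
            url = todos.popleft()
            visited.append(url)
            # ... fetch url here and extend todos with newly discovered links ...
        return visited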
