test: Add directory listing tests. chore: Avoid single-letter variable names.
jpmckinney committed Jul 22, 2024
1 parent 63f1cb5 commit 543c42b
Showing 4 changed files with 94 additions and 39 deletions.
17 changes: 8 additions & 9 deletions docs/news.rst
@@ -64,15 +64,12 @@ Library
 
 - ``sorted_versions`` to ``scrapyd.eggstorage``
 - ``get_crawl_args`` to ``scrapyd.launcher``
-- ``JsonResource`` to ``scrapyd.webservice``
 
 - Move the ``activate_egg`` function from the ``scrapyd.eggutils`` module to its caller, the ``scrapyd.runner`` module.
 - Move the ``job_items_url`` and ``job_log_url`` functions from the ``scrapyd.jobstorage`` module to the ``scrapyd.utils`` module. :ref:`jobstorage` is not responsible for URLs.
 - Change the ``scrapyd.app.create_wrapped_resource`` function to a ``scrapyd.basicauth.wrap_resource`` function.
 - Change the ``get_spider_list`` function to a ``SpiderList`` class.
 - Merge the ``JsonResource`` class into the ``WsResource`` class, removing the ``render_object`` method.
-- Remove the unused keyword arguments from the ``native_stringify_dict`` and ``to_native_str`` functions.
-- Remove the ``JsonSqliteDict`` and ``UtilsCache`` classes.
 
 Fixed
 ~~~~~
@@ -110,13 +107,15 @@ Scrapyd is now tested on macOS and Windows, in addition to Linux.
 Removed
 ~~~~~~~
 
-Removed undocumented and unused internal environment variables:
+- Remove the unused keyword arguments from the ``native_stringify_dict`` and ``to_native_str`` functions.
+- Remove the ``JsonSqliteDict`` and ``UtilsCache`` classes.
+- Remove undocumented and unused internal environment variables:
 
-- ``SCRAPY_FEED_URI`` to ``SCRAPYD_FEED_URI``
-- ``SCRAPY_JOB`` to ``SCRAPYD_JOB``
-- ``SCRAPY_LOG_FILE`` to ``SCRAPYD_LOG_FILE``
-- ``SCRAPY_SLOT`` to ``SCRAPYD_SLOT``
-- ``SCRAPY_SPIDER`` to ``SCRAPYD_SPIDER``
+  - ``SCRAPY_FEED_URI`` to ``SCRAPYD_FEED_URI``
+  - ``SCRAPY_JOB`` to ``SCRAPYD_JOB``
+  - ``SCRAPY_LOG_FILE`` to ``SCRAPYD_LOG_FILE``
+  - ``SCRAPY_SLOT`` to ``SCRAPYD_SLOT``
+  - ``SCRAPY_SPIDER`` to ``SCRAPYD_SPIDER``
 
 1.4.3 (2023-09-25)
 ------------------
10 changes: 7 additions & 3 deletions integration_tests/test_website.py
@@ -1,3 +1,5 @@
+import pytest
+
 from integration_tests import req
 
 
@@ -8,9 +10,11 @@ def test_root():
     assert '"/logs/"' in response.text
 
 
-def test_paths():
-    for page in ("/jobs", "/logs"):
-        req("get", page)
+@pytest.mark.parametrize(("path", "content"), [("jobs", "Cancel"), ("logs", "Last modified")])
+def test_paths(path, content):
+    response = req("get", f"/{path}")
+
+    assert content in response.text
 
 
 def test_base_path():
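The rewritten test asserts on page content rather than just issuing the request, with one parametrized case per page. It leans on the req helper imported from the integration test package, whose implementation is outside this diff; a minimal sketch consistent with how it is called here, with the Scrapyd address and the error handling as assumptions, might look like:

# Hypothetical sketch of the req helper, not the actual integration_tests code.
# The base URL and the raise_for_status call are assumptions.
import requests


def req(method, path, **kwargs):
    # Dispatch to requests.get, requests.post, etc., against a local Scrapyd.
    response = getattr(requests, method)(f"http://127.0.0.1:6800{path}", **kwargs)
    response.raise_for_status()
    return response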
50 changes: 25 additions & 25 deletions scrapyd/website.py
@@ -274,8 +274,8 @@ def gen_css(self):
 
     def prep_row(self, cells):
         if isinstance(cells, dict):
-            cells = [cells.get(k) for k in self.header_cols]
-        cells = [f"<td>{'' if c is None else c}</td>" for c in cells]
+            cells = [cells.get(key) for key in self.header_cols]
+        cells = [f"<td>{'' if cell is None else cell}</td>" for cell in cells]
         return f"<tr>{''.join(cells)}</tr>"
 
     def prep_doc(self):
@@ -314,48 +314,48 @@ def prep_tab_pending(self):
             self.prep_row(
                 {
                     "Project": escape(project),
-                    "Spider": escape(m["name"]),
-                    "Job": escape(m["_job"]),
-                    "Cancel": cancel_button(project=project, jobid=m["_job"], base_path=self.base_path),
+                    "Spider": escape(message["name"]),
+                    "Job": escape(message["_job"]),
+                    "Cancel": cancel_button(project=project, jobid=message["_job"], base_path=self.base_path),
                 }
             )
             for project, queue in self.root.scheduler.queues.items()
-            for m in queue.list()
+            for message in queue.list()
         )
 
     def prep_tab_running(self):
         return "\n".join(
             self.prep_row(
                 {
-                    "Project": escape(p.project),
-                    "Spider": escape(p.spider),
-                    "Job": escape(p.job),
-                    "PID": p.pid,
-                    "Start": microsec_trunc(p.start_time),
-                    "Runtime": microsec_trunc(datetime.now() - p.start_time),
-                    "Log": f'<a href="{self.base_path}{job_log_url(p)}">Log</a>',
-                    "Items": f'<a href="{self.base_path}{job_items_url(p)}">Items</a>',
-                    "Cancel": cancel_button(project=p.project, jobid=p.job, base_path=self.base_path),
+                    "Project": escape(process.project),
+                    "Spider": escape(process.spider),
+                    "Job": escape(process.job),
+                    "PID": process.pid,
+                    "Start": microsec_trunc(process.start_time),
+                    "Runtime": microsec_trunc(datetime.now() - process.start_time),
+                    "Log": f'<a href="{self.base_path}{job_log_url(process)}">Log</a>',
+                    "Items": f'<a href="{self.base_path}{job_items_url(process)}">Items</a>',
+                    "Cancel": cancel_button(project=process.project, jobid=process.job, base_path=self.base_path),
                 }
             )
-            for p in self.root.launcher.processes.values()
+            for process in self.root.launcher.processes.values()
         )
 
     def prep_tab_finished(self):
         return "\n".join(
             self.prep_row(
                 {
-                    "Project": escape(p.project),
-                    "Spider": escape(p.spider),
-                    "Job": escape(p.job),
-                    "Start": microsec_trunc(p.start_time),
-                    "Runtime": microsec_trunc(p.end_time - p.start_time),
-                    "Finish": microsec_trunc(p.end_time),
-                    "Log": f'<a href="{self.base_path}{job_log_url(p)}">Log</a>',
-                    "Items": f'<a href="{self.base_path}{job_items_url(p)}">Items</a>',
+                    "Project": escape(job.project),
+                    "Spider": escape(job.spider),
+                    "Job": escape(job.job),
+                    "Start": microsec_trunc(job.start_time),
+                    "Runtime": microsec_trunc(job.end_time - job.start_time),
+                    "Finish": microsec_trunc(job.end_time),
+                    "Log": f'<a href="{self.base_path}{job_log_url(job)}">Log</a>',
+                    "Items": f'<a href="{self.base_path}{job_items_url(job)}">Items</a>',
                 }
             )
-            for p in self.root.launcher.finished
+            for job in self.root.launcher.finished
        )
 
     def render(self, txrequest):
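The renames above also make the shape of prep_row easier to follow: it accepts either a ready-made list of cells or a dict keyed by the table's header columns, as the three prep_tab_* methods do. A standalone sketch of that logic, with header_cols reduced to an assumed three-column example:

# Standalone sketch of the prep_row logic shown above; the header columns here
# are an assumption, not Scrapyd's actual configuration.
HEADER_COLS = ["Project", "Spider", "Job"]


def prep_row(cells):
    # A dict is reordered to match the header; missing columns render as empty cells.
    if isinstance(cells, dict):
        cells = [cells.get(key) for key in HEADER_COLS]
    cells = [f"<td>{'' if cell is None else cell}</td>" for cell in cells]
    return f"<tr>{''.join(cells)}</tr>"


print(prep_row({"Project": "quotesbot", "Job": "j1"}))
# <tr><td>quotesbot</td><td></td><td>j1</td></tr>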
56 changes: 54 additions & 2 deletions tests/test_website.py
@@ -1,14 +1,66 @@
 import os
 
+from twisted.web import resource
+from twisted.web.test._util import _render
+from twisted.web.test.requesthelper import DummyRequest
+
 from scrapyd.jobstorage import Job
+from scrapyd.launcher import ScrapyProcessProtocol
 from tests import has_settings
 
 
+# Derived from test_emptyChildUnicodeParent.
+# https://github.com/twisted/twisted/blob/trunk/src/twisted/web/test/test_static.py
+def test_render_logs_dir(txrequest, root):
+    os.makedirs(os.path.join("logs", "quotesbot"))
+
+    file = root.children[b"logs"]
+    request = DummyRequest([b""])
+    child = resource.getChildForRequest(file, request)
+
+    content = child.render(request)
+
+    assert b"<th>Last modified</th>" in content
+    assert b'<td><a href="quotesbot/">quotesbot/</a></td>' in content
+
+
+# Derived from test_indexNames.
+# https://github.com/twisted/twisted/blob/trunk/src/twisted/web/test/test_static.py
+def test_render_logs_file(txrequest, root):
+    os.makedirs(os.path.join("logs", "quotesbot"))
+    with open(os.path.join("logs", "foo.bar"), "wb") as f:
+        f.write(b"baz")
+
+    file = root.children[b"logs"]
+    request = DummyRequest([b"foo.bar"])
+    child = resource.getChildForRequest(file, request)
+
+    d = _render(child, request)
+
+    def cbRendered(ignored):
+        assert list(request.responseHeaders.getAllRawHeaders()) == [
+            (b"Accept-Ranges", [b"bytes"]),
+            (b"Content-Length", [b"3"]),
+            (b"Content-Type", [b"text/plain"]),
+        ]
+        assert b"".join(request.written) == b"baz"
+
+    d.addCallback(cbRendered)
+    return d
+
+
 def test_render_jobs(txrequest, root_with_egg):
+    root_with_egg.launcher.finished.add(Job("p1", "s1", "j1"))
+    root_with_egg.launcher.processes[0] = ScrapyProcessProtocol("p2", "s2", "j2", {}, [])
+    root_with_egg.scheduler.queues["quotesbot"].add("quotesbot", _job="j3")
+
     content = root_with_egg.children[b"jobs"].render(txrequest)
     expect_headers = {
         b"Content-Type": [b"text/html; charset=utf-8"],
-        b"Content-Length": [b"643"],
+        b"Content-Length": [b"1744"],
     }
     if root_with_egg.local_items:
-        expect_headers[b"Content-Length"] = [b"601"]
+        expect_headers[b"Content-Length"] = [b"1702"]
+
+    headers = dict(txrequest.responseHeaders.getAllRawHeaders())
+
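The new test_render_logs_file test follows Twisted's deferred rendering pattern: _render drives the resource through a DummyRequest and returns a Deferred, and the assertions run in a callback once the response body has been written. The same pattern in isolation, using a stand-in Data resource rather than Scrapyd's logs directory:

# Minimal sketch of the deferred render pattern used above; Data stands in
# for the static file resource that Scrapyd serves from the logs directory.
from twisted.web.static import Data
from twisted.web.test._util import _render
from twisted.web.test.requesthelper import DummyRequest


def test_render_data():
    child = Data(b"baz", "text/plain")
    request = DummyRequest([b""])

    d = _render(child, request)

    def cbRendered(ignored):
        # DummyRequest collects every chunk the resource wrote in request.written.
        assert b"".join(request.written) == b"baz"

    d.addCallback(cbRendered)
    return d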
