From 7712e7ed34edfc8b55702f6418e60c351b855e1c Mon Sep 17 00:00:00 2001
From: Jan Vollmer
Date: Tue, 29 Oct 2024 19:58:54 +0100
Subject: [PATCH 1/3] test every endpoint at least once for status code 200 and fix broken endpoints

---
 rq_dashboard/web.py | 22 +++++++--------
 tests/test_basic.py | 65 ++++++++++++++++++++++++++++++++++++++++-----
 2 files changed, 70 insertions(+), 17 deletions(-)

diff --git a/rq_dashboard/web.py b/rq_dashboard/web.py
index d4deb57..b9528ea 100644
--- a/rq_dashboard/web.py
+++ b/rq_dashboard/web.py
@@ -336,7 +336,7 @@ def workers_overview(instance_number):
         "rq_dashboard/workers.html",
         current_instance=instance_number,
         instance_list=escape_format_instance_list(current_app.config.get("RQ_DASHBOARD_REDIS_URL")),
-        workers=Worker.all(),
+        workers=Worker.all(connection=current_app.redis_conn),
         rq_url_prefix=url_for(".queues_overview"),
         rq_dashboard_version=rq_dashboard_version,
         rq_version=rq_version,
@@ -440,7 +440,7 @@ def requeue_job_view(job_id):
 @blueprint.route("/requeue/<queue_name>", methods=["GET", "POST"])
 @jsonify
 def requeue_all(queue_name):
-    fq = Queue(queue_name, serializer=config.serializer).failed_job_registry
+    fq = Queue(queue_name, serializer=config.serializer, connection=current_app.redis_conn).failed_job_registry
     job_ids = fq.get_job_ids()
     count = len(job_ids)
     for job_id in job_ids:
@@ -453,30 +453,30 @@
 @jsonify
 def empty_queue(queue_name, registry_name):
     if registry_name == "queued":
-        q = Queue(queue_name, serializer=config.serializer)
+        q = Queue(queue_name, serializer=config.serializer, connection=current_app.redis_conn)
         q.empty()
     elif registry_name == "failed":
-        registry = FailedJobRegistry(queue_name)
+        registry = FailedJobRegistry(queue_name, connection=current_app.redis_conn)
         for id in registry.get_job_ids():
             delete_job_view(id, registry)
     elif registry_name == "deferred":
-        registry = DeferredJobRegistry(queue_name)
+        registry = DeferredJobRegistry(queue_name, connection=current_app.redis_conn)
         for id in registry.get_job_ids():
             delete_job_view(id, registry)
     elif registry_name == "started":
-        registry = StartedJobRegistry(queue_name)
+        registry = StartedJobRegistry(queue_name, connection=current_app.redis_conn)
         for id in registry.get_job_ids():
             delete_job_view(id, registry)
     elif registry_name == "finished":
-        registry = FinishedJobRegistry(queue_name)
+        registry = FinishedJobRegistry(queue_name, connection=current_app.redis_conn)
         for id in registry.get_job_ids():
             delete_job_view(id, registry)
     elif registry_name == "canceled":
-        registry = CanceledJobRegistry(queue_name)
+        registry = CanceledJobRegistry(queue_name, connection=current_app.redis_conn)
         for id in registry.get_job_ids():
             delete_job_view(id, registry)
     elif registry_name == "scheduled":
-        registry = ScheduledJobRegistry(queue_name)
+        registry = ScheduledJobRegistry(queue_name, connection=current_app.redis_conn)
         for id in registry.get_job_ids():
             delete_job_view(id, registry)
 
@@ -486,7 +486,7 @@ def empty_queue(queue_name, registry_name):
 @blueprint.route("/queue/<queue_name>/compact", methods=["POST"])
 @jsonify
 def compact_queue(queue_name):
-    q = Queue(queue_name, serializer=config.serializer)
+    q = Queue(queue_name, serializer=config.serializer, connection=current_app.redis_conn)
     q.compact()
     return dict(status="OK")
 
@@ -600,7 +600,7 @@ def list_jobs(instance_number, queue_name, registry_name, per_page, order, page)
 @blueprint.route("/<int:instance_number>/data/job/<job_id>.json")
 @jsonify
 def job_info(instance_number, job_id):
-    job = Job.fetch(job_id, serializer=config.serializer)
+    job = Job.fetch(job_id, serializer=config.serializer, connection=current_app.redis_conn)
     result = dict(
         id=job.id,
         created_at=serialize_date(job.created_at),
diff --git a/tests/test_basic.py b/tests/test_basic.py
index af17340..757a165 100644
--- a/tests/test_basic.py
+++ b/tests/test_basic.py
@@ -10,7 +10,7 @@
 
 HTTP_OK = 200
 
-
+REGISTRY_NAMES = ["failed", "deferred", "scheduled", "queued", "started", "finished", "canceled"]
 class BasicTestCase(unittest.TestCase):
     redis_client = None
 
@@ -81,11 +81,12 @@ def some_work():
         self.assertEqual(response_del.status_code, HTTP_OK)
 
     def test_registry_jobs_list(self):
-        response = self.client.get('/0/data/jobs/default/failed/8/asc/1.json')
-        self.assertEqual(response.status_code, HTTP_OK)
-        data = json.loads(response.data.decode('utf8'))
-        self.assertIsInstance(data, dict)
-        self.assertIn('jobs', data)
+        for registry_name in REGISTRY_NAMES:
+            response = self.client.get(f'/0/data/jobs/default/{registry_name}/8/asc/1.json')
+            self.assertEqual(response.status_code, HTTP_OK)
+            data = json.loads(response.data.decode('utf8'))
+            self.assertIsInstance(data, dict)
+            self.assertIn('jobs', data)
 
     def test_worker_python_version_field(self):
         w = Worker(['q'], connection=self.app.redis_conn)
@@ -143,6 +144,58 @@ def some_work():
         data_dsc = json.loads(response_dsc.data.decode('utf8'))
         self.assertEqual(job_ids[::-1], [job['id'] for job in data_dsc['jobs']])
 
+    def test_job_info(self):
+        def some_work():
+            return
+        q = Queue(connection=self.app.redis_conn)
+        job = q.enqueue(some_work)
+        job_info_url = f'/0/data/job/{job.id}.json'
+        response_info = self.client.get(job_info_url)
+        self.assertEqual(response_info.status_code, HTTP_OK)
+
+    def test_compact_queue(self):
+        def some_work():
+            return
+        q = Queue(connection=self.app.redis_conn)
+        q.enqueue(some_work)
+        compact_queue_url = f"/queue/{q.name}/compact"
+        response_compact = self.client.post(compact_queue_url)
+        self.assertEqual(response_compact.status_code, HTTP_OK)
+
+    def test_empty_queue(self):
+        for registry_name in REGISTRY_NAMES:
+            response = self.client.post(f'/queue/default/{registry_name}/empty')
+            self.assertEqual(response.status_code, HTTP_OK)
+
+    def test_requeue_all(self):
+        def some_failing_work():
+            raise Exception
+        q = Queue(connection=self.app.redis_conn)
+        job = q.enqueue(some_failing_work)
+        worker = Worker([q], connection=self.app.redis_conn)
+        worker.execute_job(job, q)
+        requeue_all_url = f"/requeue/{q.name}"
+        for method in (self.client.post, self.client.get):
+            response = method(requeue_all_url)
+            self.assertEqual(response.status_code, HTTP_OK)
+
+    def test_requeue_one(self):
+        def some_failing_work():
+            raise Exception
+        q = Queue(connection=self.app.redis_conn)
+        job = q.enqueue(some_failing_work)
+        worker = Worker([q], connection=self.app.redis_conn)
+        worker.execute_job(job, q)
+        requeue_one_url = f"/job/{job.id}/requeue"
+        response = self.client.post(requeue_one_url)
+        self.assertEqual(response.status_code, HTTP_OK)
+
+    def test_workers_overview(self):
+        workers_overview_url = "/0/view/workers"
+        response = self.client.get(workers_overview_url)
+        self.assertEqual(response.status_code, HTTP_OK)
+
+
 
 __all__ = [
     'BasicTestCase',

From 04e5c1b805b0d578d4d63f6f3a4d7c0d093f3ad1 Mon Sep 17 00:00:00 2001
From: Jan Vollmer
Date: Tue, 29 Oct 2024 20:06:36 +0100
Subject: [PATCH 2/3] fix exc_info always showing "None" for jobs that didn't fail and remove more deprecated Job.exc_info references

---
 rq_dashboard/web.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/rq_dashboard/web.py b/rq_dashboard/web.py
index b9528ea..e8ca7f9 100644
--- a/rq_dashboard/web.py
+++ b/rq_dashboard/web.py
@@ -210,7 +210,7 @@ def serialize_job(job: Job):
         id=job.id,
         created_at=serialize_date(job.created_at),
         ended_at=serialize_date(job.ended_at),
-        exc_info=str(latest_result.exc_string) if latest_result else None,
+        exc_info=latest_result.exc_string if latest_result else None,
         description=job.description,
     )
 
@@ -601,6 +601,7 @@ def list_jobs(instance_number, queue_name, registry_name, per_page, order, page)
 @jsonify
 def job_info(instance_number, job_id):
     job = Job.fetch(job_id, serializer=config.serializer, connection=current_app.redis_conn)
+    latest_result = job.latest_result()
     result = dict(
         id=job.id,
         created_at=serialize_date(job.created_at),
@@ -609,7 +610,7 @@ def job_info(instance_number, job_id):
         origin=job.origin,
         status=job.get_status(),
         result=job.return_value(),
-        exc_info=str(job.exc_info) if job.exc_info else None,
+        exc_info=latest_result.exc_string if latest_result else None,
         description=job.description,
         metadata=json.dumps(job.get_meta()),
     )

From 13a7d6fbd34edb04320953cff82d4aeea6577d09 Mon Sep 17 00:00:00 2001
From: Jan Vollmer
Date: Tue, 29 Oct 2024 20:36:38 +0100
Subject: [PATCH 3/3] enable tests in github actions

---
 .github/workflows/pr.yaml | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml
index ca59ef7..e176246 100644
--- a/.github/workflows/pr.yaml
+++ b/.github/workflows/pr.yaml
@@ -7,13 +7,20 @@ on:
 jobs:
   test:
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: [3.9, "3.10", "3.11", "3.12", "3.13"]
     steps:
       - name: Checkout code
-        uses: actions/checkout@v2
-      - name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/checkout@v4
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v5
         with:
-          python-version: '3.x'
+          python-version: ${{ matrix.python-version }}
+      - name: Start Redis
+        uses: supercharge/redis-github-action@1.8.0
+        with:
+          redis-version: 7
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
@@ -22,7 +29,6 @@ jobs:
           pip install -r requirements-dev.txt
 
       - name: Run tests
-        if: false
         run: pytest
 
   build: