Skip to content

Commit

Permalink
Raise errors from threads in whitenoise.compress (#61)
Browse files Browse the repository at this point in the history
Co-authored-by: Adam Chainz <[email protected]>
  • Loading branch information
Archmonger and adamchainz authored Oct 28, 2024
1 parent 003d95d commit db9d198
Show file tree
Hide file tree
Showing 4 changed files with 51 additions and 32 deletions.
4 changes: 3 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,9 @@ Using the following categories, list your changes in this order:

## [Unreleased]

- Nothing (yet)!
### Changed

- Any errors from threads in the `servestatic.compress` command are now raised.

## [2.1.1] - 2024-10-27

Expand Down
21 changes: 12 additions & 9 deletions src/servestatic/compress.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
from __future__ import annotations

import argparse
import concurrent.futures
import gzip
import os
import re
from concurrent.futures import ThreadPoolExecutor, as_completed
from io import BytesIO

try:
Expand Down Expand Up @@ -123,12 +123,6 @@ def write_data(path, data, suffix, stat_result):
os.utime(filename, (stat_result.st_atime, stat_result.st_mtime))
return filename

def files_to_compress(self, root):
    """Walk *root* recursively and yield the full path of every file
    that ``self.should_compress`` accepts.

    Paths are produced lazily, in ``os.walk`` order, so callers can
    stream them into a thread pool without materializing the list.
    """
    for dirpath, _dirs, filenames in os.walk(root):
        # Filter by bare filename first, then join only the survivors.
        eligible = (name for name in filenames if self.should_compress(name))
        for name in eligible:
            yield os.path.join(dirpath, name)


def main(argv=None):
parser = argparse.ArgumentParser(
Expand Down Expand Up @@ -167,8 +161,17 @@ def main(argv=None):
quiet=args.quiet,
)

with concurrent.futures.ThreadPoolExecutor() as executor:
executor.map(compressor.compress, compressor.files_to_compress(args.root))
futures = []
with ThreadPoolExecutor() as executor:
for dirpath, _dirs, files in os.walk(args.root):
futures.extend(
executor.submit(compressor.compress, os.path.join(dirpath, filename))
for filename in files
if compressor.should_compress(filename)
)
# Trigger any errors
for future in as_completed(futures):
future.result()

return 0

Expand Down
47 changes: 25 additions & 22 deletions src/servestatic/storage.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
from __future__ import annotations

import concurrent.futures
import contextlib
import errno
import json
import os
import re
import textwrap
from collections.abc import Iterator
from collections.abc import Generator, Iterator
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import Any, Union

from django.conf import settings
Expand Down Expand Up @@ -37,20 +37,17 @@ def post_process(self, paths: dict[str, Any], dry_run: bool = False, **options:
extensions = getattr(settings, "SERVESTATIC_SKIP_COMPRESS_EXTENSIONS", None)
self.compressor = self.create_compressor(extensions=extensions, quiet=True)

to_compress = (path for path in paths if self.compressor.should_compress(path))
with concurrent.futures.ThreadPoolExecutor() as executor:
futures = (executor.submit(self._compress_one, path) for path in to_compress)
for compressed_paths in concurrent.futures.as_completed(futures):
yield from compressed_paths.result()

def _compress_one(self, path: str) -> list[tuple[str, str, bool]]:
compressed: list[tuple[str, str, bool]] = []
full_path = self.path(path)
prefix_len = len(full_path) - len(path)
compressed.extend(
(path, compressed_path[prefix_len:], True) for compressed_path in self.compressor.compress(full_path)
)
return compressed
def _compress_path(path: str) -> Generator[tuple[str, str, bool]]:
full_path = self.path(path)
prefix_len = len(full_path) - len(path)
for compressed_path in self.compressor.compress(full_path):
compressed_name = compressed_path[prefix_len:]
yield (path, compressed_name, True)

with ThreadPoolExecutor() as executor:
futures = (executor.submit(_compress_path, path) for path in paths if self.compressor.should_compress(path))
for future in as_completed(futures):
yield from future.result()

def create_compressor(self, **kwargs: Any) -> Compressor: # noqa: PLR6301
return Compressor(**kwargs)
Expand Down Expand Up @@ -184,15 +181,21 @@ def delete_files(self, files_to_delete):
def create_compressor(self, **kwargs): # noqa: PLR6301
return Compressor(**kwargs)

def compress_files(self, names):
def compress_files(self, paths):
extensions = getattr(settings, "SERVESTATIC_SKIP_COMPRESS_EXTENSIONS", None)
self.compressor = self.create_compressor(extensions=extensions, quiet=True)

to_compress = (name for name in names if self.compressor.should_compress(name))
with concurrent.futures.ThreadPoolExecutor() as executor:
futures = (executor.submit(self._compress_one, name) for name in to_compress)
for compressed_paths in concurrent.futures.as_completed(futures):
yield from compressed_paths.result()
def _compress_path(path: str) -> Generator[tuple[str, str]]:
full_path = self.path(path)
prefix_len = len(full_path) - len(path)
for compressed_path in self.compressor.compress(full_path):
compressed_name = compressed_path[prefix_len:]
yield (path, compressed_name)

with ThreadPoolExecutor() as executor:
futures = (executor.submit(_compress_path, path) for path in paths if self.compressor.should_compress(path))
for future in as_completed(futures):
yield from future.result()

def _compress_one(self, name: str) -> list[tuple[str, str]]:
compressed: list[tuple[str, str]] = []
Expand Down
11 changes: 11 additions & 0 deletions tests/test_compress.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import re
import shutil
import tempfile
from unittest import mock

import pytest

Expand Down Expand Up @@ -78,3 +79,13 @@ def test_compress():
def test_compressed_effectively_no_orig_size():
    """A file whose original size is 0 is never considered effectively compressed."""
    compressor = Compressor(quiet=True)
    outcome = compressor.is_compressed_effectively("test_encoding", "test_path", 0, "test_data")
    assert not outcome


def test_main_error(files_dir):
    """An exception raised inside a worker thread must propagate out of main()."""
    patched_compress = mock.patch.object(Compressor, "compress", side_effect=ValueError("woops"))
    with patched_compress, pytest.raises(ValueError, match="woops") as excinfo:
        compress_main([files_dir, "--quiet"])

    assert excinfo.value.args == ("woops",)

0 comments on commit db9d198

Please sign in to comment.