diff --git a/src/dom_tokenizers/pre_tokenizers/pre_tokenizer.py b/src/dom_tokenizers/pre_tokenizers/pre_tokenizer.py
index 1401b9d..75de19c 100644
--- a/src/dom_tokenizers/pre_tokenizers/pre_tokenizer.py
+++ b/src/dom_tokenizers/pre_tokenizers/pre_tokenizer.py
@@ -86,12 +86,15 @@ def _pre_tokenize_dom(
             buf = TokenBuffer()
             self.pre_tokenize_dom(buf, split.original)
             return buf.tokens
-        except Exception as e:
+        except Exception as e:  # pragma: no cover
             logger.exception(f"{type(e).__name__} in pre-tokenizer:")
             raise
 
     @abstractmethod
-    def pre_tokenize_dom(self, buf: TokenBuffer, serialized: str):
+    def pre_tokenize_dom(
+            self,
+            buf: TokenBuffer,
+            serialized: str):  # pragma: no cover
         """Transform a serialized DOM into a sequence of tokens.
         """
         raise NotImplementedError
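
For context, a minimal sketch of how a concrete subclass might implement the `pre_tokenize_dom` hook this patch touches. The base-class name `PreTokenizer`, the import path, the JSON shape of `serialized`, and the `TokenBuffer.append` method are illustrative assumptions, not taken from this patch:

```python
import json

from dom_tokenizers.pre_tokenizers.pre_tokenizer import (  # assumed path
    PreTokenizer,  # assumed base-class name
    TokenBuffer,
)


class WhitespacePreTokenizer(PreTokenizer):
    """Hypothetical subclass: whitespace-splits JSON-serialized text nodes."""

    def pre_tokenize_dom(self, buf: TokenBuffer, serialized: str):
        # Assume the serialized DOM arrives as a JSON array of
        # text-node strings (an assumption for illustration only).
        for text in json.loads(serialized):
            for token in text.split():
                buf.append(token)  # assumes TokenBuffer exposes append()
```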