diff --git a/sickchill/oldbeard/classes.py b/sickchill/oldbeard/classes.py
index 54d2eec7b8..0b69ff0b19 100644
--- a/sickchill/oldbeard/classes.py
+++ b/sickchill/oldbeard/classes.py
@@ -48,7 +48,7 @@ def __init__(self, episodes):
# content
self.content = None
- self.resultType = ""
+ self.result_type = ""
self.priority = 0
@@ -103,7 +103,7 @@ class NZBSearchResult(SearchResult):
def __init__(self, episodes):
super().__init__(episodes)
- self.resultType = "nzb"
+ self.result_type = "nzb"
class NZBDataSearchResult(SearchResult):
@@ -113,7 +113,7 @@ class NZBDataSearchResult(SearchResult):
def __init__(self, episodes):
super().__init__(episodes)
- self.resultType = "nzbdata"
+ self.result_type = "nzbdata"
class TorrentSearchResult(SearchResult):
@@ -123,7 +123,7 @@ class TorrentSearchResult(SearchResult):
def __init__(self, episodes):
super().__init__(episodes)
- self.resultType = "torrent"
+ self.result_type = "torrent"
class Proper(object):
diff --git a/sickchill/oldbeard/clients/download_station.py b/sickchill/oldbeard/clients/download_station.py
index 1f09945eee..c1805a345f 100644
--- a/sickchill/oldbeard/clients/download_station.py
+++ b/sickchill/oldbeard/clients/download_station.py
@@ -122,12 +122,12 @@ def _get_destination(self, result):
"""
Determines which destination setting to use depending on result type
"""
- if result.resultType in (GenericProvider.NZB, GenericProvider.NZBDATA):
+ if result.result_type in (GenericProvider.NZB, GenericProvider.NZBDATA):
destination = settings.SYNOLOGY_DSM_PATH.strip()
- elif result.resultType == GenericProvider.TORRENT:
+ elif result.result_type == GenericProvider.TORRENT:
destination = settings.TORRENT_PATH.strip()
else:
- raise AttributeError("Invalid result passed to client when getting destination: resultType {}".format(result.resultType))
+ raise AttributeError("Invalid result passed to client when getting destination: result_type {}".format(result.result_type))
return re.sub(r"^/volume\d/", "", destination).lstrip("/")
@@ -137,9 +137,9 @@ def _set_destination(self, result, destination):
params: :destination: DSM share name
"""
destination = destination.strip()
- if result.resultType in (GenericProvider.NZB, GenericProvider.NZBDATA):
+ if result.result_type in (GenericProvider.NZB, GenericProvider.NZBDATA):
settings.SYNOLOGY_DSM_PATH = destination
- elif result.resultType == GenericProvider.TORRENT:
+ elif result.result_type == GenericProvider.TORRENT:
settings.TORRENT_PATH = destination
else:
raise AttributeError("Invalid result passed to client when setting destination")
@@ -163,7 +163,7 @@ def _check_destination(self, result):
logger.info("Destination set to %s", self._get_destination(result))
except (ValueError, KeyError, JSONDecodeError) as error:
logger.debug("Get DownloadStation default destination error: {0}".format(error))
- logger.warning("Could not get share destination from DownloadStation for {}, please set it in the settings", result.resultType)
+ logger.warning("Could not get share destination from DownloadStation for {}, please set it in the settings".format(result.result_type))
raise
def _add_torrent_uri(self, result):
@@ -197,7 +197,7 @@ def _add_torrent_file(self, result):
data = self._task_post_data
- result_type = result.resultType.replace("data", "")
+ result_type = result.result_type.replace("data", "")
files = {result_type: (".".join([result.name, result_type]), result.content)}
data["type"] = '"file"'
@@ -220,7 +220,7 @@ def sendNZB(self, result):
logger.warning("{0}: Authentication Failed".format(self.name))
return False
- if result.resultType == "nzb":
+ if result.result_type == "nzb":
return self._add_torrent_uri(result)
- elif result.resultType == "nzbdata":
+ elif result.result_type == "nzbdata":
return self._add_torrent_file(result)
diff --git a/sickchill/oldbeard/notifiers/emailnotify.py b/sickchill/oldbeard/notifiers/emailnotify.py
index 3db7bb5d57..17b07ece79 100644
--- a/sickchill/oldbeard/notifiers/emailnotify.py
+++ b/sickchill/oldbeard/notifiers/emailnotify.py
@@ -32,7 +32,7 @@ def notify_snatch(self, ep_name, title="Snatched:"):
title: The title of the notification (optional)
"""
if settings.USE_EMAIL and settings.EMAIL_NOTIFY_ONSNATCH:
- show = self._parseEp(ep_name)
+ show = self.parse_episode(ep_name)
to = self._generate_recipients(show)
if not to:
logger.debug("Skipping email notify because there are no configured recipients")
@@ -86,7 +86,7 @@ def notify_download(self, ep_name, title="Completed:"):
title: The title of the notification (optional)
"""
if settings.USE_EMAIL and settings.EMAIL_NOTIFY_ONDOWNLOAD:
- show = self._parseEp(ep_name)
+ show = self.parse_episode(ep_name)
to = self._generate_recipients(show)
if not to:
logger.debug("Skipping email notify because there are no configured recipients")
@@ -140,7 +140,7 @@ def notify_postprocess(self, ep_name, title="Postprocessed:"):
title: The title of the notification (optional)
"""
if settings.USE_EMAIL and settings.EMAIL_NOTIFY_ONPOSTPROCESS:
- show = self._parseEp(ep_name)
+ show = self.parse_episode(ep_name)
to = self._generate_recipients(show)
if not to:
logger.debug("Skipping email notify because there are no configured recipients")
@@ -194,7 +194,7 @@ def notify_subtitle_download(self, ep_name, lang, title="Downloaded subtitle:"):
lang: Subtitle language wanted
"""
if settings.USE_EMAIL and settings.EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD:
- show = self._parseEp(ep_name)
+ show = self.parse_episode(ep_name)
to = self._generate_recipients(show)
if not to:
logger.debug("Skipping email notify because there are no configured recipients")
@@ -386,7 +386,7 @@ def _sendmail(self, host, port, smtp_from, use_tls, user, pwd, to, msg, smtpDebu
return False
@staticmethod
- def _parseEp(ep_name):
+ def parse_episode(ep_name):
sep = " - "
titles = ep_name.split(sep)
logger.debug("TITLES: {0}".format(titles))
diff --git a/sickchill/oldbeard/nzbget.py b/sickchill/oldbeard/nzbget.py
index 797b49343a..3b69d2d417 100644
--- a/sickchill/oldbeard/nzbget.py
+++ b/sickchill/oldbeard/nzbget.py
@@ -73,7 +73,7 @@ def sendNZB(nzb, proper=False):
dupescore += 10
nzbcontent64 = None
- if nzb.resultType == "nzbdata":
+ if nzb.result_type == "nzbdata":
data = nzb.extraInfo[0]
nzbcontent64 = standard_b64encode(data)
@@ -88,7 +88,7 @@ def sendNZB(nzb, proper=False):
if nzbcontent64:
nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", category, addToTop, nzbcontent64)
else:
- if nzb.resultType == "nzb":
+ if nzb.result_type == "nzb":
if not nzb.provider.login():
return False
diff --git a/sickchill/oldbeard/postProcessor.py b/sickchill/oldbeard/postProcessor.py
index 18f649432e..a14f52873a 100644
--- a/sickchill/oldbeard/postProcessor.py
+++ b/sickchill/oldbeard/postProcessor.py
@@ -1191,7 +1191,7 @@ def process(self):
# If any notification fails, don't stop postProcessor
try:
# send notifications
- notifiers.notify_download(episode_object._format_pattern("%SN - %Sx%0E - %EN - %QN"))
+ notifiers.notify_download(episode_object.format_pattern("%SN - %Sx%0E - %EN - %QN"))
# do the library update for KODI
notifiers.kodi_notifier.update_library(episode_object.show.name)
diff --git a/sickchill/oldbeard/properFinder.py b/sickchill/oldbeard/properFinder.py
index afe3c87633..004a9e6792 100644
--- a/sickchill/oldbeard/properFinder.py
+++ b/sickchill/oldbeard/properFinder.py
@@ -12,7 +12,7 @@
from . import db, helpers
from .common import cpu_presets, DOWNLOADED, Quality, SNATCHED, SNATCHED_PROPER
from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser
-from .search import pickBestResult, snatchEpisode
+from .search import pick_best_result, snatch_episode
class ProperFinder(object):
@@ -127,7 +127,7 @@ def _getProperList(self):
curProper.content = None
# filter release
- bestResult = pickBestResult(curProper, parse_result.show)
+ bestResult = pick_best_result(curProper, parse_result.show)
if not bestResult:
logger.debug("Proper " + curProper.name + " were rejected by our release filters.")
continue
@@ -234,7 +234,7 @@ def _downloadPropers(self, properList):
result.content = curProper.content
# snatch it
- snatchEpisode(result, SNATCHED_PROPER)
+ snatch_episode(result, SNATCHED_PROPER)
time.sleep(cpu_presets[settings.CPU_PRESET])
@staticmethod
diff --git a/sickchill/oldbeard/providers/newznab.py b/sickchill/oldbeard/providers/newznab.py
index 0e3e1dd374..f053d73c48 100644
--- a/sickchill/oldbeard/providers/newznab.py
+++ b/sickchill/oldbeard/providers/newznab.py
@@ -17,7 +17,7 @@ class NewznabProvider(NZBProvider, tvcache.RSSTorrentMixin):
Tested with: newznab, nzedb, spotweb, torznab
"""
- def __init__(self, name, url, key="0", catIDs="5030,5040", search_mode="eponly", search_fallback=False, enable_daily=True, enable_backlog=False):
+ def __init__(self, name, url, key="0", catIDs="5030,5040", search_mode="episode", search_fallback=False, enable_daily=True, enable_backlog=False):
super().__init__(name)
self.url = url
@@ -177,11 +177,11 @@ def get_newznab_categories(self, just_caps=False):
def _get_default_providers():
# name|url|key|catIDs|enabled|search_mode|search_fallback|enable_daily|enable_backlog
return (
- "NZB.Cat|https://nzb.cat/||5030,5040,5010|0|eponly|1|1|1!!!"
- + "NZBFinder.ws|https://nzbfinder.ws/||5030,5040,5010,5045|0|eponly|1|1|1!!!"
- + "NZBGeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0!!!"
- + "Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|eponly|0|0|0!!!"
- + "DOGnzb|https://api.dognzb.cr/||5030,5040,5060,5070|0|eponly|0|1|1"
+ "NZB.Cat|https://nzb.cat/||5030,5040,5010|0|episode|1|1|1!!!"
+ + "NZBFinder.ws|https://nzbfinder.ws/||5030,5040,5010,5045|0|episode|1|1|1!!!"
+ + "NZBGeek|https://api.nzbgeek.info/||5030,5040|0|episode|0|0|0!!!"
+ + "Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|episode|0|0|0!!!"
+ + "DOGnzb|https://api.dognzb.cr/||5030,5040,5060,5070|0|episode|0|1|1"
)
def _check_auth(self):
@@ -222,7 +222,7 @@ def _make_provider(config):
enable_backlog = 0
enable_daily = 0
search_fallback = 0
- search_mode = "eponly"
+ search_mode = "episode"
try:
values = config.split("|")
@@ -239,6 +239,11 @@ def _make_provider(config):
logger.exception("Skipping Newznab provider string: '{0}', incorrect format".format(config))
return None
+ if search_mode == "sponly":
+ search_mode = "season"
+ elif search_mode == "eponly":
+ search_mode = "episode"
+
new_provider = NewznabProvider(
name,
url,
diff --git a/sickchill/oldbeard/providers/rsstorrent.py b/sickchill/oldbeard/providers/rsstorrent.py
index da180cdbbf..87980a7b88 100644
--- a/sickchill/oldbeard/providers/rsstorrent.py
+++ b/sickchill/oldbeard/providers/rsstorrent.py
@@ -10,7 +10,7 @@
class TorrentRssProvider(TorrentProvider):
- def __init__(self, name, url, cookies="", titleTAG="title", search_mode="eponly", search_fallback=False, enable_daily=False, enable_backlog=False):
+ def __init__(self, name, url, cookies="", titleTAG="title", search_mode="episode", search_fallback=False, enable_daily=False, enable_backlog=False):
super().__init__(name)
self.cache = TorrentRssCache(self, min_time=15)
@@ -85,7 +85,7 @@ def _make_provider(config):
enable_backlog = 0
enable_daily = 0
search_fallback = 0
- search_mode = "eponly"
+ search_mode = "episode"
title_tag = "title"
try:
diff --git a/sickchill/oldbeard/sab.py b/sickchill/oldbeard/sab.py
index 983d5c5550..8cb2f50248 100644
--- a/sickchill/oldbeard/sab.py
+++ b/sickchill/oldbeard/sab.py
@@ -42,11 +42,11 @@ def sendNZB(nzb): # pylint:disable=too-many-return-statements, too-many-branche
logger.info("Sending NZB to SABnzbd")
url = urljoin(settings.SAB_HOST, "api")
- if nzb.resultType == "nzb":
+ if nzb.result_type == "nzb":
params["mode"] = "addurl"
params["name"] = nzb.url
jdata = helpers.getURL(url, params=params, session=session, returns="json", verify=False)
- elif nzb.resultType == "nzbdata":
+ elif nzb.result_type == "nzbdata":
params["mode"] = "addfile"
multiPartParams = {"nzbfile": (nzb.name + ".nzb", nzb.extraInfo[0])}
jdata = helpers.getURL(url, params=params, files=multiPartParams, session=session, returns="json", verify=False)
diff --git a/sickchill/oldbeard/search.py b/sickchill/oldbeard/search.py
index b2e47ab39d..0574bbf131 100644
--- a/sickchill/oldbeard/search.py
+++ b/sickchill/oldbeard/search.py
@@ -13,13 +13,13 @@
from sickchill.show.History import History
if TYPE_CHECKING: # pragma: no cover
- from sickchill.oldbeard.classes import TorrentSearchResult
+ from sickchill.oldbeard.classes import SearchResult
from . import clients, common, db, helpers, notifiers, nzbget, nzbSplitter, sab, show_name_helpers, ui
from .common import MULTI_EP_RESULT, Quality, SEASON_RESULT, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER
-def _downloadResult(result: "TorrentSearchResult"):
+def _download_result(result: "SearchResult"):
"""
Downloads a result to the appropriate black hole folder.
@@ -27,47 +27,47 @@ def _downloadResult(result: "TorrentSearchResult"):
:return: boolean, True on success
"""
- resProvider = result.provider
- if resProvider is None:
+ result_was_downloaded = False
+
+ result_provider = result.provider
+ if result_provider is None:
logger.exception("Invalid provider name - this is a coding error, report it please")
- return False
+ return result_was_downloaded
- # nzbs/torrents with an URL can just be downloaded from the provider
- if result.resultType in (GenericProvider.NZB, GenericProvider.TORRENT):
- newResult = resProvider.download_result(result)
+ # nzbs/torrents with a URL can just be downloaded from the provider
+ if result.result_type in (GenericProvider.NZB, GenericProvider.TORRENT):
+ result_was_downloaded = result_provider.download_result(result)
# if it's an nzb data result
- elif result.resultType == GenericProvider.NZBDATA:
+ elif result.result_type == GenericProvider.NZBDATA:
# get the final file path to the nzb
filename = os.path.join(settings.NZB_DIR, f"{result.name}.nzb")
-
logger.info(f"Saving NZB to {filename}")
- newResult = True
-
# save the data to disk
try:
with open(filename, "w") as fileOut:
fileOut.write(result.extraInfo[0])
+ result_was_downloaded = True
helpers.chmodAsParent(filename)
except EnvironmentError as error:
logger.exception(f"Error trying to save NZB to black hole: {error}")
- newResult = False
+ result_was_downloaded = False
else:
logger.exception("Invalid provider type - this is a coding error, report it please")
- newResult = False
+ result_was_downloaded = False
- return newResult
+ return result_was_downloaded
-def snatchEpisode(result: "TorrentSearchResult", endStatus=SNATCHED):
+def snatch_episode(result: "SearchResult", end_status=SNATCHED):
"""
Contains the internal logic necessary to actually "snatch" a result that
has been found.
:param result: SearchResult instance to be snatched.
- :param endStatus: the episode status that should be used for the episode object once it's snatched.
+ :param end_status: the episode status that should be used for the episode object once it's snatched.
:return: boolean, True on success
"""
@@ -76,39 +76,45 @@ def snatchEpisode(result: "TorrentSearchResult", endStatus=SNATCHED):
if settings.ALLOW_HIGH_PRIORITY:
# if it aired recently make it high priority
- for curEp in result.episodes:
- if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
+ for episode in result.episodes:
+ if datetime.date.today() - episode.airdate <= datetime.timedelta(days=7):
result.priority = 1
- endStatus = SNATCHED_PROPER if re.search(r"\b(proper|repack|real)\b", result.name, re.I) else endStatus
+ end_status = SNATCHED_PROPER if re.search(r"\b(proper|repack|real)\b", result.name, re.I) else end_status
- # This is breaking if newznab protocol, expecting a torrent from a url and getting a magnet instead.
+ # This is breaking the newznab protocol when expecting a torrent from the url and getting a magnet instead.
if result.url and "jackett_apikey" in result.url:
response = result.provider.get_url(result.url, allow_redirects=False, returns="response")
if response.next and response.next.url and response.next.url.startswith("magnet"):
result.url = response.next.url
+ torznab: bool = bool(hasattr(result.provider, "torznab") and result.provider.torznab or "jackett" in result.url)
+ torznab |= bool(result.url.startswith("magnet:") and re.search(r"urn:btih:(\w{32,40})", result.url))
+
+ if torznab or (result.url.startswith("magnet:") and re.search(r"urn:btih:(\w{32,40})", result.url)):
+ result.result_type = GenericProvider.TORRENT
+
# NZBs can be sent straight to SAB or saved to disk
- if result.resultType in (GenericProvider.NZB, GenericProvider.NZBDATA):
+ if result.result_type in (GenericProvider.NZB, GenericProvider.NZBDATA):
if settings.NZB_METHOD == "blackhole":
- dlResult = _downloadResult(result)
+ snatched_result = _download_result(result)
elif settings.NZB_METHOD == "sabnzbd":
- dlResult = sab.sendNZB(result)
+ snatched_result = sab.sendNZB(result)
elif settings.NZB_METHOD == "nzbget":
- is_proper = True if endStatus == SNATCHED_PROPER else False
- dlResult = nzbget.sendNZB(result, is_proper)
+ is_proper = True if end_status == SNATCHED_PROPER else False
+ snatched_result = nzbget.sendNZB(result, is_proper)
elif settings.NZB_METHOD == "download_station":
client = clients.getClientInstance(settings.NZB_METHOD)(settings.SYNOLOGY_DSM_HOST, settings.SYNOLOGY_DSM_USERNAME, settings.SYNOLOGY_DSM_PASSWORD)
- dlResult = client.sendNZB(result)
+ snatched_result = client.sendNZB(result)
else:
logger.exception(f"Unknown NZB action specified in config: {settings.NZB_METHOD}")
- dlResult = False
+ snatched_result = False
# Torrents can be sent to clients or saved to disk
- elif result.resultType == GenericProvider.TORRENT:
+ elif result.result_type == GenericProvider.TORRENT:
# torrents are saved to disk when blackhole mode
if settings.TORRENT_METHOD == "blackhole":
- dlResult = _downloadResult(result)
+ snatched_result = _download_result(result)
else:
if not result.content and not result.url.startswith("magnet"):
if result.provider.login():
@@ -116,16 +122,16 @@ def snatchEpisode(result: "TorrentSearchResult", endStatus=SNATCHED):
if result.content or result.url.startswith("magnet"):
client = clients.getClientInstance(settings.TORRENT_METHOD)()
- dlResult = client.sendTORRENT(result)
+ snatched_result = client.sendTORRENT(result)
else:
logger.warning("Torrent file content is empty")
History().logFailed(result.episodes, result.name, result.provider)
- dlResult = False
+ snatched_result = False
else:
- logger.exception(f"Unknown result type, unable to download it ({result.resultType})")
- dlResult = False
+ logger.exception(f"Unknown result type, unable to download it ({result.result_type})")
+ snatched_result = False
- if not dlResult:
+ if not snatched_result:
return False
ui.notifications.message("Episode snatched", result.name)
@@ -136,16 +142,17 @@ def snatchEpisode(result: "TorrentSearchResult", endStatus=SNATCHED):
trakt_data = []
for curEpObj in result.episodes:
with curEpObj.lock:
- if isFirstBestMatch(result):
+ if is_first_best_match(result):
curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
else:
- curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
+ curEpObj.status = Quality.compositeStatus(end_status, result.quality)
sql_l.append(curEpObj.get_sql())
if curEpObj.status not in Quality.DOWNLOADED:
+ # noinspection PyBroadException
try:
- notifiers.notify_snatch(f"{curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN')} from {result.provider.name}")
+ notifiers.notify_snatch(f"{curEpObj.format_pattern('%SN - %Sx%0E - %EN - %QN')} from {result.provider.name}")
except Exception:
# Without this, when notification fail, it crashes the snatch thread and SC will
# keep snatching until notification is sent
@@ -167,7 +174,7 @@ def snatchEpisode(result: "TorrentSearchResult", endStatus=SNATCHED):
return True
-def pickBestResult(results, show):
+def pick_best_result(results, show):
"""
Find the best result out of a list of search results for a show
@@ -179,68 +186,68 @@ def pickBestResult(results, show):
logger.debug(f"Picking the best result out of {[x.name for x in results]}")
- bestResult = None
+ picked_result = None
# order the list so that preferred releases are at the top
results.sort(key=lambda ep: show_name_helpers.hasPreferredWords(ep.name, ep.show), reverse=True)
# find the best result for the current episode
- for cur_result in results:
- if show and cur_result.show is not show:
+ for result in results:
+ if show and result.show is not show:
continue
# build the black And white list
if show.is_anime:
- if not show.release_groups.is_valid(cur_result):
+ if not show.release_groups.is_valid(result):
continue
- logger.info(f"Quality of {cur_result.name} is {Quality.qualityStrings[cur_result.quality]}")
+ logger.info(f"Quality of {result.name} is {Quality.qualityStrings[result.quality]}")
- anyQualities, bestQualities = Quality.splitQuality(show.quality)
+ allowed_qualities, preferred_qualities = Quality.splitQuality(show.quality)
- if cur_result.quality not in anyQualities + bestQualities:
- logger.debug(f"{cur_result.name} is a quality we know we don't want, rejecting it")
+ if result.quality not in allowed_qualities + preferred_qualities:
+ logger.debug(f"{result.name} is a quality we know we don't want, rejecting it")
continue
- if not show_name_helpers.filter_bad_releases(cur_result.name, parse=False, show=show):
+ if not show_name_helpers.filter_bad_releases(result.name, parse=False, show=show):
continue
- if hasattr(cur_result, "size"):
- if settings.USE_FAILED_DOWNLOADS and History().hasFailed(cur_result.name, cur_result.size, cur_result.provider.name):
- logger.info(f"{cur_result.name} has previously failed, rejecting it")
+ if hasattr(result, "size"):
+ if settings.USE_FAILED_DOWNLOADS and History().hasFailed(result.name, result.size, result.provider.name):
+ logger.info(f"{result.name} has previously failed, rejecting it")
continue
- if not bestResult:
- bestResult = cur_result
- elif cur_result.quality in bestQualities and (bestResult.quality < cur_result.quality or bestResult.quality not in bestQualities):
- bestResult = cur_result
- elif cur_result.quality in anyQualities and bestResult.quality not in bestQualities and bestResult.quality < cur_result.quality:
- bestResult = cur_result
- elif bestResult.quality == cur_result.quality:
- if "proper" in cur_result.name.lower() or "real" in cur_result.name.lower() or "repack" in cur_result.name.lower():
- logger.info(f"Preferring {cur_result.name} (repack/proper/real over nuked)")
- bestResult = cur_result
- elif "internal" in bestResult.name.lower() and "internal" not in cur_result.name.lower():
- logger.info(f"Preferring {cur_result.name} (normal instead of internal)")
- bestResult = cur_result
- elif "xvid" in bestResult.name.lower() and "x264" in cur_result.name.lower():
- logger.info(f"Preferring {cur_result.name} (x264 over xvid)")
- bestResult = cur_result
-
- if bestResult:
- logger.debug(f"Picked {bestResult.name} as the best")
+ if not picked_result:
+ picked_result = result
+ elif result.quality in preferred_qualities and (picked_result.quality < result.quality or picked_result.quality not in preferred_qualities):
+ picked_result = result
+ elif result.quality in allowed_qualities and picked_result.quality not in preferred_qualities and picked_result.quality < result.quality:
+ picked_result = result
+ elif picked_result.quality == result.quality:
+ if "proper" in result.name.lower() or "real" in result.name.lower() or "repack" in result.name.lower():
+ logger.info(f"Preferring {result.name} (repack/proper/real over nuked)")
+ picked_result = result
+ elif "internal" in picked_result.name.lower() and "internal" not in result.name.lower():
+ logger.info(f"Preferring {result.name} (normal instead of internal)")
+ picked_result = result
+ elif "xvid" in picked_result.name.lower() and "x264" in result.name.lower():
+ logger.info(f"Preferring {result.name} (x264 over xvid)")
+ picked_result = result
+
+ if picked_result:
+ logger.debug(f"Picked {picked_result.name} as the best")
else:
logger.debug("No result picked.")
- return bestResult
+ return picked_result
-def isFinalResult(result: "TorrentSearchResult"):
+def is_final_result(result: "SearchResult"):
"""
Checks if the given result is good enough quality that we can stop searching for other ones.
:param result: quality to check
- :return: True if the result is the highest quality in both the any/best quality lists else False
+ :return: True if the result is the highest quality in both of the quality lists else False
"""
logger.debug(f"Checking if we should keep searching after we've found {result.name}")
@@ -269,9 +276,9 @@ def isFinalResult(result: "TorrentSearchResult"):
return False
-def isFirstBestMatch(result: "TorrentSearchResult"):
+def is_first_best_match(result: "SearchResult"):
"""
- Checks if the given result is a best quality match and if we want to stop searching providers here.
+ Checks if the given result is the best match and if we want to stop searching providers here.
:param result: to check
:return: True if the result is the best quality match else False
@@ -286,11 +293,11 @@ def isFirstBestMatch(result: "TorrentSearchResult"):
return result.quality in best_qualities if best_qualities else False
-def wantedEpisodes(show, fromDate):
+def wanted_episodes(show, from_date):
"""
Get a list of episodes that we want to download
:param show: Show these episodes are from
- :param fromDate: Search from a certain date
+ :param from_date: Search from a certain date
:return: list of wanted episodes
"""
wanted = []
@@ -305,55 +312,56 @@ def wantedEpisodes(show, fromDate):
con = db.DBConnection()
sql_results = con.select(
- "SELECT status, season, episode FROM tv_episodes WHERE showid = ? AND season > 0 and airdate > ?", [show.indexerid, fromDate.toordinal()]
+ "SELECT status, season, episode FROM tv_episodes WHERE showid = ? AND season > 0 and airdate > ?", [show.indexerid, from_date.toordinal()]
)
# check through the list of statuses to see if we want any
for result in sql_results:
- cur_status, cur_quality = common.Quality.splitCompositeStatus(int(result["status"] or -1))
- if cur_status not in {common.WANTED, common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER}:
+ status, quality = common.Quality.splitCompositeStatus(int(result["status"] or -1))
+ if status not in {common.WANTED, common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER}:
continue
- if cur_status != common.WANTED:
+ if status != common.WANTED:
if preferred_qualities:
- if cur_quality in preferred_qualities:
+ if quality in preferred_qualities:
continue
- elif cur_quality in allowed_qualities:
+ elif quality in allowed_qualities:
continue
- epObj = show.getEpisode(result["season"], result["episode"])
- epObj.wantedQuality = [i for i in all_qualities if i > cur_quality and i != common.Quality.UNKNOWN]
- wanted.append(epObj)
+ episode_object = show.getEpisode(result["season"], result["episode"])
+ episode_object.wantedQuality = [i for i in all_qualities if i > quality and i != common.Quality.UNKNOWN]
+ wanted.append(episode_object)
return wanted
-def searchForNeededEpisodes():
+def search_for_needed_episodes():
"""
Check providers for details on wanted episodes
:return: episodes we have a search hit for
"""
- foundResults = {}
+ found_results = {}
- didSearch = False
+ did_search = False
show_list = settings.showList
- fromDate = datetime.date.min
+ from_date = datetime.date.min
episodes = []
for curShow in show_list:
if not curShow.paused:
sickchill.oldbeard.name_cache.build_name_cache(curShow)
- episodes.extend(wantedEpisodes(curShow, fromDate))
+ episodes.extend(wanted_episodes(curShow, from_date))
if not episodes:
- # nothing wanted so early out, ie: avoid whatever abritrarily
+ # nothing wanted so early out, ie: avoid whatever arbitrarily
# complex thing a provider cache update entails, for example,
# reading rss feeds
logger.info("No episodes needed.")
- return list(foundResults.values())
+ return list(found_results.values())
+ # noinspection DuplicatedCode
original_thread_name = threading.current_thread().name
providers = [x for x in sickchill.oldbeard.providers.sorted_provider_list(settings.RANDOMIZE_PROVIDERS) if x.is_active and x.enable_daily and x.can_daily]
@@ -364,7 +372,7 @@ def searchForNeededEpisodes():
for curProvider in providers:
threading.current_thread().name = f"{original_thread_name} :: [{curProvider.name}]"
try:
- curFoundResults = curProvider.search_rss(episodes)
+ found_rss_results = curProvider.search_rss(episodes)
except AuthException as error:
logger.warning(f"Authentication error: {error}")
continue
@@ -373,53 +381,55 @@ def searchForNeededEpisodes():
logger.debug(traceback.format_exc())
continue
- didSearch = True
+ did_search = True
# pick a single result for each episode, respecting existing results
- for curEp in curFoundResults:
- if not curEp.show or curEp.show.paused:
- logger.debug(f"Skipping {curEp.pretty_name} because the show is paused ")
+ for current_episode in found_rss_results:
+ if not current_episode.show or current_episode.show.paused:
+ logger.debug(f"Skipping {current_episode.pretty_name} because the show is paused ")
continue
- bestResult = pickBestResult(curFoundResults[curEp], curEp.show)
+ best_result = pick_best_result(found_rss_results[current_episode], current_episode.show)
# if all results were rejected move on to the next episode
- if not bestResult:
- logger.debug(f"All found results for {curEp.pretty_name} were rejected.")
+ if not best_result:
+ logger.debug(f"All found results for {current_episode.pretty_name} were rejected.")
continue
- # if it's already in the list (from another provider) and the newly found quality is no better then skip it
- if curEp in foundResults and bestResult.quality <= foundResults[curEp].quality:
+ # if it's already in the list (from another provider) and the newly found quality is no better, then skip it
+ if current_episode in found_results and best_result.quality <= found_results[current_episode].quality:
continue
- foundResults[curEp] = bestResult
+ found_results[current_episode] = best_result
threading.current_thread().name = original_thread_name
- if not didSearch:
+ if not did_search:
logger.info("No NZB/Torrent providers found or enabled in the sickchill config for daily searches. Please check your settings.")
- return list(foundResults.values())
+ return list(found_results.values())
-def searchProviders(show, episodes, manualSearch=False, downCurQuality=False):
+# noinspection PyPep8Naming
+def search_providers(show, episodes, manual=False, downCurQuality=False):
"""
Walk providers for information on shows
:param show: Show we are looking for
:param episodes: Episodes we hope to find
- :param manualSearch: Boolean, is this a manual search?
+ :param manual: Boolean, is this a manual search?
:param downCurQuality: Boolean, should we re-download currently available quality file
:return: results for search
"""
- foundResults = {}
- finalResults = []
+ found_results = {}
+ final_results = []
- didSearch = False
+ did_search = False
# build name cache for show
sickchill.oldbeard.name_cache.build_name_cache(show)
+ # noinspection DuplicatedCode
original_thread_name = threading.current_thread().name
providers = [
@@ -438,26 +448,26 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False):
logger.debug(f"{show.name} is not an anime, skipping")
continue
- foundResults[curProvider.name] = {}
+ found_results[curProvider.name] = {}
- searchCount = 0
+ search_count = 0
search_mode = curProvider.search_mode
- # Always search for episode when manually searching when in sponly
- if search_mode == "sponly" and manualSearch is True:
- search_mode = "eponly"
+ # Always search for episode when manually searching when in season
+ if search_mode == "season" and manual is True:
+ search_mode = "episode"
while True:
- searchCount += 1
+ search_count += 1
logger.info(
_("Performing {episode_or_season} search for {show}").format(
- episode_or_season=(_("season pack"), _("episode"))[search_mode == "eponly"], show=show.name
+ episode_or_season=(_("season pack"), _("episode"))[search_mode == "episode"], show=show.name
)
)
try:
- searchResults = curProvider.find_search_results(show, episodes, search_mode, manualSearch, downCurQuality)
+ search_results = curProvider.find_search_results(show, episodes, search_mode, manual, downCurQuality)
except AuthException as error:
logger.warning(f"Authentication error: {error}")
break
@@ -466,105 +476,106 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False):
logger.debug(traceback.format_exc())
break
- didSearch = True
+ did_search = True
- if searchResults:
+ if search_results:
# make a list of all the results for this provider
- for curEp in searchResults:
- if curEp in foundResults[curProvider.name]:
- foundResults[curProvider.name][curEp] += searchResults[curEp]
+ for curEp in search_results:
+ if curEp in found_results[curProvider.name]:
+ found_results[curProvider.name][curEp] += search_results[curEp]
else:
- foundResults[curProvider.name][curEp] = searchResults[curEp]
+ found_results[curProvider.name][curEp] = search_results[curEp]
break
- elif searchCount == 2 or not curProvider.search_fallback:
+ elif search_count == 2 or not curProvider.search_fallback:
break
- if search_mode == "sponly":
+ if search_mode == "season":
logger.debug("Fallback episode search initiated")
- search_mode = "eponly"
+ search_mode = "episode"
else:
logger.debug("Fallback season pack search initiate")
- search_mode = "sponly"
+ search_mode = "season"
# skip to next provider if we have no results to process
- if not foundResults[curProvider.name]:
+ if not found_results[curProvider.name]:
continue
# pick the best season NZB
- bestSeasonResult = None
- if SEASON_RESULT in foundResults[curProvider.name]:
- bestSeasonResult = pickBestResult(foundResults[curProvider.name][SEASON_RESULT], show)
+ best_season_result = None
+ if SEASON_RESULT in found_results[curProvider.name]:
+ best_season_result = pick_best_result(found_results[curProvider.name][SEASON_RESULT], show)
highest_quality_overall = 0
- for cur_episode in foundResults[curProvider.name]:
- for cur_result in foundResults[curProvider.name][cur_episode]:
+ for cur_episode in found_results[curProvider.name]:
+ for cur_result in found_results[curProvider.name][cur_episode]:
if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
highest_quality_overall = cur_result.quality
logger.debug(f"The highest quality of any match is {Quality.qualityStrings[highest_quality_overall]}")
# see if every episode is wanted
- if bestSeasonResult:
- searchedSeasons = {str(x.season) for x in episodes}
+ if best_season_result:
+ searched_seasons = {str(x.season) for x in episodes}
# get the quality of the season nzb
- seasonQual = bestSeasonResult.quality
- logger.info(f"The quality of the season {bestSeasonResult.provider.provider_type} is {Quality.qualityStrings[seasonQual]}")
+ season_quality = best_season_result.quality
+ logger.info(f"The quality of the season {best_season_result.provider.provider_type} is {Quality.qualityStrings[season_quality]}")
main_db_con = db.DBConnection()
- allEps = [
+ all_episodes = [
int(x["episode"])
for x in main_db_con.select(
- f"SELECT episode FROM tv_episodes WHERE showid = ? AND ( season IN ( {','.join(['?'] * len(searchedSeasons))} ) )",
- [show.indexerid] + list(searchedSeasons),
+ f"SELECT episode FROM tv_episodes WHERE showid = ? AND ( season IN ( {','.join(['?'] * len(searched_seasons))} ) )",
+ [show.indexerid] + list(searched_seasons),
)
]
- logger.info(f"Executed query: [SELECT episode FROM tv_episodes WHERE showid = {show.indexerid} AND season in {searchedSeasons}]")
- logger.debug(f"Episode list: {allEps}")
+ logger.info(f"Executed query: [SELECT episode FROM tv_episodes WHERE showid = {show.indexerid} AND season in {searched_seasons}]")
+ logger.debug(f"Episode list: {all_episodes}")
- allWanted = True
- anyWanted = False
- for curEpNum in allEps:
+ all_wanted = True
+ some_wanted = False
+ for curEpNum in all_episodes:
for season in (x.season for x in episodes):
- if not show.wantEpisode(season, curEpNum, seasonQual, downCurQuality):
- allWanted = False
+ if not show.wantEpisode(season, curEpNum, season_quality, downCurQuality):
+ all_wanted = False
else:
- anyWanted = True
+ some_wanted = True
- # if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
- if allWanted and bestSeasonResult.quality == highest_quality_overall:
- logger.info(f"Every ep in this season is needed, downloading the whole {bestSeasonResult.provider.provider_type} {bestSeasonResult.name}")
- epObjs = []
- for curEpNum in allEps:
+ # if we need every ep in the season and there's nothing better, then just download this and be done with it (unless single episodes are preferred)
+ if all_wanted and best_season_result.quality == highest_quality_overall:
+ logger.info(f"Every ep in this season is needed, downloading the whole {best_season_result.provider.provider_type} {best_season_result.name}")
+ episode_objects = []
+ for curEpNum in all_episodes:
for season in {x.season for x in episodes}:
- epObjs.append(show.getEpisode(season, curEpNum))
- bestSeasonResult.episodes = epObjs
+ episode_objects.append(show.getEpisode(season, curEpNum))
+ best_season_result.episodes = episode_objects
# Remove provider from thread name before return results
threading.current_thread().name = original_thread_name
- return [bestSeasonResult]
+ return [best_season_result]
- elif not anyWanted:
- logger.info(f"No eps from this season are wanted at this quality, ignoring the result of {bestSeasonResult.name}")
+ elif not some_wanted:
+ logger.info(f"No eps from this season are wanted at this quality, ignoring the result of {best_season_result.name}")
else:
- if bestSeasonResult.resultType != GenericProvider.TORRENT:
+ if best_season_result.result_type != GenericProvider.TORRENT:
logger.debug("Breaking apart the NZB and adding the individual ones to our results")
# if not, break it apart and add them as the lowest priority results
- individualResults = nzbSplitter.split_result(bestSeasonResult)
- for curResult in individualResults:
- if len(curResult.episodes) == 1:
- epNum = curResult.episodes[0].episode
- elif len(curResult.episodes) > 1:
- epNum = MULTI_EP_RESULT
-
- if epNum in foundResults[curProvider.name]:
- foundResults[curProvider.name][epNum].append(curResult)
+ split_results = nzbSplitter.split_result(best_season_result)
+ for current_result in split_results:
+ episode_number = -1
+ if len(current_result.episodes) == 1:
+ episode_number = current_result.episodes[0].episode
+ elif len(current_result.episodes) > 1:
+ episode_number = MULTI_EP_RESULT
+
+ if episode_number >= 0 and episode_number in found_results[curProvider.name]:
+ found_results[curProvider.name][episode_number].append(current_result)
else:
- foundResults[curProvider.name][epNum] = [curResult]
+ found_results[curProvider.name][episode_number] = [current_result]
# If this is a torrent all we can do is leech the entire torrent, user will have to select which eps not do download in his torrent client
else:
@@ -572,110 +583,110 @@ def searchProviders(show, episodes, manualSearch=False, downCurQuality=False):
logger.info(
"Adding multi-ep result for full-season torrent. Set the episodes you don't want to 'don't download' in your torrent client if desired!"
)
- epObjs = []
- for curEpNum in allEps:
+ episode_objects = []
+ for curEpNum in all_episodes:
for season in {x.season for x in episodes}:
- epObjs.append(show.getEpisode(season, curEpNum))
- bestSeasonResult.episodes = epObjs
+ episode_objects.append(show.getEpisode(season, curEpNum))
+ best_season_result.episodes = episode_objects
- if MULTI_EP_RESULT in foundResults[curProvider.name]:
- foundResults[curProvider.name][MULTI_EP_RESULT].append(bestSeasonResult)
+ if MULTI_EP_RESULT in found_results[curProvider.name]:
+ found_results[curProvider.name][MULTI_EP_RESULT].append(best_season_result)
else:
- foundResults[curProvider.name][MULTI_EP_RESULT] = [bestSeasonResult]
+ found_results[curProvider.name][MULTI_EP_RESULT] = [best_season_result]
# go through multi-ep results and see if we really want them or not, get rid of the rest
- multiResults = {}
- if MULTI_EP_RESULT in foundResults[curProvider.name]:
- for _multiResult in foundResults[curProvider.name][MULTI_EP_RESULT]:
+ multi_results = {}
+ if MULTI_EP_RESULT in found_results[curProvider.name]:
+ for _multiResult in found_results[curProvider.name][MULTI_EP_RESULT]:
logger.debug(f"Seeing if we want to bother with multi-episode result {_multiResult.name}")
# Filter result by ignore/required/whitelist/blacklist/quality, etc
- multiResult = pickBestResult(_multiResult, show)
- if not multiResult:
+ multi_result = pick_best_result(_multiResult, show)
+ if not multi_result:
continue
# see how many of the eps that this result covers aren't covered by single results
- neededEps = []
- notNeededEps = []
- for epObj in multiResult.episodes:
+ needed_episodes = []
+ unneeded_episodes = []
+ for epObj in multi_result.episodes:
# if we have results for the episode
- if epObj.episode in foundResults[curProvider.name] and len(foundResults[curProvider.name][epObj.episode]) > 0:
- notNeededEps.append(epObj.episode)
+ if epObj.episode in found_results[curProvider.name] and len(found_results[curProvider.name][epObj.episode]) > 0:
+ unneeded_episodes.append(epObj.episode)
else:
- neededEps.append(epObj.episode)
+ needed_episodes.append(epObj.episode)
- logger.debug(f"Single-ep check result is neededEps: {neededEps}, notNeededEps: {notNeededEps}")
+ logger.debug(f"Single-ep check result is needed_episodes: {needed_episodes}, unneeded_episodes: {unneeded_episodes}")
- if not neededEps:
+ if not needed_episodes:
logger.debug("All of these episodes were covered by single episode results, ignoring this multi-episode result")
continue
# check if these eps are already covered by another multi-result
- multiNeededEps = []
- multiNotNeededEps = []
- for epObj in multiResult.episodes:
- if epObj.episode in multiResults:
- multiNotNeededEps.append(epObj.episode)
+ needed_multiepisodes = []
+ unneeded_multiepisodes = []
+ for epObj in multi_result.episodes:
+ if epObj.episode in multi_results:
+ unneeded_multiepisodes.append(epObj.episode)
else:
- multiNeededEps.append(epObj.episode)
+ needed_multiepisodes.append(epObj.episode)
- logger.debug(f"Multi-ep check result is multiNeededEps: {multiNeededEps}, multiNotNeededEps: {multiNotNeededEps}")
+ logger.debug(f"Multi-ep check result is needed_multiepisodes: {needed_multiepisodes}, unneeded_multiepisodes: {unneeded_multiepisodes}")
- if not multiNeededEps:
+ if not needed_multiepisodes:
logger.debug("All of these episodes were covered by another multi-episode nzbs, ignoring this multi-ep result")
continue
# don't bother with the single result if we're going to get it with a multi result
- for epObj in multiResult.episodes:
- multiResults[epObj.episode] = multiResult
- if epObj.episode in foundResults[curProvider.name]:
+ for epObj in multi_result.episodes:
+ multi_results[epObj.episode] = multi_result
+ if epObj.episode in found_results[curProvider.name]:
logger.debug(
"A needed multi-episode result overlaps with a single-episode result for ep #"
+ f"{epObj.episode}"
+ ", removing the single-episode results from the list"
)
- del foundResults[curProvider.name][epObj.episode]
+ del found_results[curProvider.name][epObj.episode]
# of all the single ep results narrow it down to the best one for each episode
- finalResults += set(multiResults.values())
- for curEp in foundResults[curProvider.name]:
+ final_results += set(multi_results.values())
+ for curEp in found_results[curProvider.name]:
if curEp in (MULTI_EP_RESULT, SEASON_RESULT):
continue
- if not foundResults[curProvider.name][curEp]:
+ if not found_results[curProvider.name][curEp]:
continue
# if all results were rejected move on to the next episode
- bestResult = pickBestResult(foundResults[curProvider.name][curEp], show)
- if not bestResult:
+ best_result = pick_best_result(found_results[curProvider.name][curEp], show)
+ if not best_result:
continue
- # add result if its not a duplicate and
+ # add result if it's not a duplicate and
found = False
- for i, result in enumerate(finalResults):
- for bestResultEp in bestResult.episodes:
+ for i, result in enumerate(final_results):
+ for bestResultEp in best_result.episodes:
if bestResultEp in result.episodes:
- if result.quality < bestResult.quality:
- finalResults.pop(i)
+ if result.quality < best_result.quality:
+ final_results.pop(i)
else:
found = True
if not found:
- finalResults += [bestResult]
+ final_results += [best_result]
# check that we got all the episodes we wanted first before doing a match and snatch
- wantedEpCount = 0
- for wantedEp in episodes:
- for result in finalResults:
- if wantedEp in result.episodes and isFinalResult(result):
- wantedEpCount += 1
+ wanted_episode_count = 0
+ for wanted_episode in episodes:
+ for result in final_results:
+ if wanted_episode in result.episodes and is_final_result(result):
+ wanted_episode_count += 1
# make sure we search every provider for results unless we found everything we wanted
- if wantedEpCount == len(episodes):
+ if wanted_episode_count == len(episodes):
break
- if not didSearch:
+ if not did_search:
logger.info("No NZB/Torrent providers found or enabled in the sickchill config for backlog searches. Please check your settings.")
# Remove provider from thread name before return results
threading.current_thread().name = original_thread_name
- return finalResults
+ return final_results
diff --git a/sickchill/oldbeard/search_queue.py b/sickchill/oldbeard/search_queue.py
index d22171619d..1a39b3a6a0 100644
--- a/sickchill/oldbeard/search_queue.py
+++ b/sickchill/oldbeard/search_queue.py
@@ -126,7 +126,7 @@ def run(self):
try:
logger.info("Beginning daily search for new episodes")
- found_results = search.searchForNeededEpisodes()
+ found_results = search.search_for_needed_episodes()
if not found_results:
logger.info("No needed episodes found")
@@ -134,7 +134,7 @@ def run(self):
for result in found_results:
# just use the first result for now
logger.info(f"Downloading {result.name} from {result.provider.name}")
- self.success = search.snatchEpisode(result)
+ self.success = search.snatch_episode(result)
# give the CPU a break
time.sleep(common.cpu_presets[settings.CPU_PRESET])
@@ -166,12 +166,12 @@ def run(self):
logger.info(f"Beginning manual search for: [{self.segment.pretty_name}]")
self.started = True
- search_result = search.searchProviders(self.show, [self.segment], True, self.downCurQuality)
+ search_result = search.search_providers(self.show, [self.segment], True, self.downCurQuality)
if search_result:
# just use the first result for now
logger.info(f"Downloading {search_result[0].name} from {search_result[0].provider.name}")
- self.success = search.snatchEpisode(search_result[0])
+ self.success = search.snatch_episode(search_result[0])
# give the CPU a break
time.sleep(common.cpu_presets[settings.CPU_PRESET])
@@ -209,13 +209,13 @@ def run(self):
if not self.show.paused:
try:
logger.info(f"Beginning backlog search for: [{self.show.name}]")
- searchResult = search.searchProviders(self.show, self.segment, False)
+ searchResult = search.search_providers(self.show, self.segment, False)
if searchResult:
for result in searchResult:
# just use the first result for now
logger.info(f"Downloading {result.name} from {result.provider.name}")
- search.snatchEpisode(result)
+ search.snatch_episode(result)
# give the CPU a break
time.sleep(common.cpu_presets[settings.CPU_PRESET])
@@ -279,15 +279,15 @@ def run(self):
History().markFailed(epObj)
logger.info(f"Beginning failed download search for: [{epObj.pretty_name}]")
- # If it is wanted, self.downCurQuality doesnt matter
- # if it isnt wanted, we need to make sure to not overwrite the existing ep that we reverted to!
- search_result = search.searchProviders(self.show, self.segment, True)
+ # If it is wanted, self.downCurQuality doesn't matter
+ # if it isn't wanted, we need to make sure to not overwrite the existing ep that we reverted to!
+ search_result = search.search_providers(self.show, self.segment, True)
if search_result:
for result in search_result:
# just use the first result for now
logger.info(f"Downloading {result.name} from {result.provider.name}")
- search.snatchEpisode(result)
+ search.snatch_episode(result)
# give the CPU a break
time.sleep(common.cpu_presets[settings.CPU_PRESET])
diff --git a/sickchill/oldbeard/show_queue.py b/sickchill/oldbeard/show_queue.py
index 763145db9f..87373d264c 100644
--- a/sickchill/oldbeard/show_queue.py
+++ b/sickchill/oldbeard/show_queue.py
@@ -728,7 +728,7 @@ def run(self):
try:
# TODO: episode_object is undefined here, so all of these will fail.
# send notifications
- # notifiers.notify_download(episode_object._format_pattern('%SN - %Sx%0E - %EN - %QN'))
+ # notifiers.notify_download(episode_object.naming_pattern('%SN - %Sx%0E - %EN - %QN'))
# do the library update for KODI
notifiers.kodi_notifier.update_library(self.show.name)
diff --git a/sickchill/providers/GenericProvider.py b/sickchill/providers/GenericProvider.py
index cacbb08176..49edf11cfe 100644
--- a/sickchill/providers/GenericProvider.py
+++ b/sickchill/providers/GenericProvider.py
@@ -141,15 +141,15 @@ def find_search_results(self, show, episodes, search_mode, manual_search=False,
continue
- if len(episodes) > 1 and search_mode == "sponly" and searched_scene_season == episode.scene_season:
+ if len(episodes) > 1 and search_mode == "season" and searched_scene_season == episode.scene_season:
continue
search_strings = []
searched_scene_season = episode.scene_season
- if len(episodes) > 1 and search_mode == "sponly":
+ if len(episodes) > 1 and search_mode == "season":
search_strings = self.get_season_search_strings(episode)
- elif search_mode == "eponly":
+ elif search_mode == "episode":
search_strings = self.get_episode_search_strings(episode)
for search_string in search_strings:
@@ -199,7 +199,7 @@ def find_search_results(self, show, episodes, search_mode, manual_search=False,
actual_season = -1
if not (show_object.air_by_date or show_object.sports):
- if search_mode == "sponly":
+ if search_mode == "season":
if parse_result.episode_numbers:
logger.debug(f"This is supposed to be a season pack search but the result {title} is not a valid season pack, skipping it")
skip_release = True
diff --git a/sickchill/providers/metadata/tivo.py b/sickchill/providers/metadata/tivo.py
index 86ee49b35b..01e2dcde35 100644
--- a/sickchill/providers/metadata/tivo.py
+++ b/sickchill/providers/metadata/tivo.py
@@ -133,11 +133,11 @@ def episode_pretty_title(self, episode_object: "TVEpisode"):
"""
if episode_object.show.anime and not episode_object.show.scene:
- return episode_object._format_pattern("%AB - %EN")
+ return episode_object.naming_pattern("%AB - %EN")
elif episode_object.show.air_by_date:
- return episode_object._format_pattern("%AD - %EN")
+ return episode_object.naming_pattern("%AD - %EN")
- return episode_object._format_pattern("S%0SE%0E - %EN")
+ return episode_object.naming_pattern("S%0SE%0E - %EN")
def _ep_data(self, episode_object: "TVEpisode"):
"""
diff --git a/sickchill/providers/nzb/NZBProvider.py b/sickchill/providers/nzb/NZBProvider.py
index b1cdde288c..256d72632d 100644
--- a/sickchill/providers/nzb/NZBProvider.py
+++ b/sickchill/providers/nzb/NZBProvider.py
@@ -18,7 +18,7 @@ def is_active(self):
def _get_result(self, episodes):
result = NZBSearchResult(episodes)
if self.torznab or result.url.startswith("magnet") or result.url.endswith("torrent"):
- result.resultType = GenericProvider.TORRENT
+ result.result_type = GenericProvider.TORRENT
return result
diff --git a/sickchill/show/History.py b/sickchill/show/History.py
index 7a3e96079d..f5e9f08e72 100644
--- a/sickchill/show/History.py
+++ b/sickchill/show/History.py
@@ -74,7 +74,8 @@ def get(self, limit: int = 100, action: str = None):
"FROM history h, tv_shows s "
"WHERE h.showid = s.indexer_id "
)
- filter_sql = "AND action in (" + ",".join(["?"] * len(actions)) + ") "
+ replacements = ",".join(["?"] * len(actions))
+ filter_sql = f"AND action IN ({replacements}) "
order_sql = "ORDER BY date DESC "
if limit == 0:
diff --git a/sickchill/start.py b/sickchill/start.py
index ff90bdb51f..156424e9a9 100644
--- a/sickchill/start.py
+++ b/sickchill/start.py
@@ -813,7 +813,7 @@ def path_leaf(path):
if hasattr(curProvider, "freeleech"):
curProvider.freeleech = check_setting_bool(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_freeleech"))
if hasattr(curProvider, "search_mode"):
- curProvider.search_mode = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_search_mode"), "eponly")
+ curProvider.search_mode = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_search_mode"), "episode")
if hasattr(curProvider, "search_fallback"):
curProvider.search_fallback = check_setting_bool(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_search_fallback"))
if hasattr(curProvider, "enable_daily"):
diff --git a/sickchill/tv.py b/sickchill/tv.py
index e6cc4e6a3d..5eb2a5d256 100644
--- a/sickchill/tv.py
+++ b/sickchill/tv.py
@@ -14,6 +14,7 @@
import imdb
from imdb import Cinemagoer
+from typing import Union
from unidecode import unidecode
from urllib3.exceptions import MaxRetryError, NewConnectionError
@@ -247,7 +248,7 @@ def getAllEpisodes(self, season=None, has_location=False):
return ep_list
- def getEpisode(self, season=None, episode=None, ep_file=None, noCreate=False, absolute_number=None) -> "TVEpisode":
+ def getEpisode(self, season=None, episode=None, ep_file=None, noCreate=False, absolute_number=None) -> Union["TVEpisode", None]:
season = try_int(season, None)
episode = try_int(episode, None)
absolute_number = try_int(absolute_number, None)
@@ -2028,9 +2029,9 @@ def createStrings(self, pattern=None):
strings = []
if not pattern:
for p in patterns:
- strings += [self._format_pattern(p)]
+ strings += [self.naming_pattern(p)]
return strings
- return self._format_pattern(pattern)
+ return self.naming_pattern(pattern)
@property
def pretty_name(self):
@@ -2042,11 +2043,11 @@ def pretty_name(self):
"""
if self.show.anime and not self.show.scene:
- return self._format_pattern("%SN - %AB - %EN")
+ return self.naming_pattern("%SN - %AB - %EN")
elif self.show.air_by_date:
- return self._format_pattern("%SN - %AD - %EN")
+ return self.naming_pattern("%SN - %AD - %EN")
- return self._format_pattern("%SN - S%0SE%0E - %EN")
+ return self.naming_pattern("%SN - S%0SE%0E - %EN")
def _ep_name(self):
"""
@@ -2239,7 +2240,7 @@ def _format_string(pattern, replace_map):
return result_name
- def _format_pattern(self, pattern=None, multi=None, anime_type=None):
+ def naming_pattern(self, pattern=None, multi=None, anime_type=None):
"""
Manipulates an episode naming pattern and then fills the template in
"""
@@ -2437,7 +2438,7 @@ def formatted_dir(self, pattern=None, multi=None, anime_type=None):
if len(name_groups) == 1:
return ""
else:
- return self._format_pattern(os.sep.join(name_groups[:-1]), multi, anime_type)
+ return self.naming_pattern(os.sep.join(name_groups[:-1]), multi, anime_type)
def formatted_filename(self, pattern=None, multi=None, anime_type=None):
"""
@@ -2458,7 +2459,7 @@ def formatted_filename(self, pattern=None, multi=None, anime_type=None):
# split off the dirs only, if they exist
name_groups = re.split(r"[\\/]", pattern)
- return sanitize_filename(self._format_pattern(name_groups[-1], multi, anime_type))
+ return sanitize_filename(self.naming_pattern(name_groups[-1], multi, anime_type))
def rename(self):
"""
diff --git a/sickchill/views/config/providers.py b/sickchill/views/config/providers.py
index f3789c0930..c72db2c0b8 100644
--- a/sickchill/views/config/providers.py
+++ b/sickchill/views/config/providers.py
@@ -142,7 +142,7 @@ def saveProviders(self):
newznab_provider_dict[current_id].catIDs = current_cat
# a 0 in the key spot indicates that no key is needed
newznab_provider_dict[current_id].needs_auth = current_key and current_key != "0"
- newznab_provider_dict[current_id].search_mode = self.get_body_argument(current_id + "_search_mode", "eponly")
+ newznab_provider_dict[current_id].search_mode = self.get_body_argument(current_id + "_search_mode", "episode")
newznab_provider_dict[current_id].search_fallback = config.checkbox_to_value(
self.get_body_argument(current_id + "search_fallback", 0), value_on=1, value_off=0
)
@@ -246,7 +246,7 @@ def saveProviders(self):
current_provider.check_set_option(self, "subtitle", False, cast=config.checkbox_to_value)
current_provider.check_set_option(self, "sorting", "seeders")
- current_provider.check_set_option(self, "search_mode", "eponly")
+ current_provider.check_set_option(self, "search_mode", "episode")
current_provider.check_set_option(self, "ratio", 0, cast=lambda x: max(try_int(x), -1))
diff --git a/sickchill/views/home.py b/sickchill/views/home.py
index 987807a8db..287d92e391 100644
--- a/sickchill/views/home.py
+++ b/sickchill/views/home.py
@@ -1632,12 +1632,12 @@ def manual_search_show_releases(self):
cache_db_con = db.DBConnection("cache.db", row_type="dict")
# show_object: TVShow = Show.find(settings.showList, show)
- # sickchill.oldbeard.search.searchProviders(
+ # sickchill.oldbeard.search.search_providers(
# show_object,
# show_object.getEpisode(season=season, episode=episode or 1),
# downCurQuality=True,
- # manualSearch=True,
- # manual_snatch=('sponly', 'eponly')[episode is not None]
+ # manual=True,
+ # manual_snatch=('season', 'episode')[episode is not None]
# )
if episode is not None:
@@ -1693,7 +1693,7 @@ def manual_snatch_show_release(self, *args, **kwargs):
if isinstance(result, str):
sickchill.logger.info(_("Could not snatch manually selected result: {result}").format(result=result))
elif isinstance(result, sickchill.oldbeard.classes.SearchResult):
- sickchill.oldbeard.search.snatchEpisode(result, SNATCHED_BEST)
+ sickchill.oldbeard.search.snatch_episode(result, SNATCHED_BEST)
return self.redirect("/home/displayShow?show=" + show)
diff --git a/tests/test_notifier.py b/tests/test_notifier.py
index cf045c90bc..d7f5357150 100644
--- a/tests/test_notifier.py
+++ b/tests/test_notifier.py
@@ -98,8 +98,8 @@ def test_email(self):
shows = self.legacy_shows + self.shows
for show in shows:
for episode in show.episodes:
- ep_name = episode._format_pattern("%SN - %Sx%0E - %EN - ") + episode.quality
- show_name = email_notifier._parseEp(ep_name)
+ ep_name = episode.naming_pattern("%SN - %Sx%0E - %EN - ") + episode.quality
+ show_name = email_notifier.parse_episode(ep_name)
recipients = email_notifier._generate_recipients(show_name)
self._debug_spew("- Email Notifications for " + show.name + " (episode: " + episode.name + ") will be sent to:")
for email in recipients:
@@ -189,7 +189,7 @@ def test_prowl(self):
# Now, iterate through all shows using the Prowl API generation routines that are used in the notifier proper
for show in self.shows:
for episode in show.episodes:
- ep_name = episode._format_pattern("%SN - %Sx%0E - %EN - ") + episode.quality
+ ep_name = episode.naming_pattern("%SN - %Sx%0E - %EN - ") + episode.quality
show_name = prowl_notifier._parse_episode(ep_name)
recipients = prowl_notifier._generate_recipients(show_name)
self._debug_spew("- Prowl Notifications for " + show.name + " (episode: " + episode.name + ") will be sent to:")
diff --git a/tests/test_snatch.py b/tests/test_snatch.py
index d29fc881a2..4ef00355fd 100644
--- a/tests/test_snatch.py
+++ b/tests/test_snatch.py
@@ -124,7 +124,7 @@ def do_test():
episode.status = common.WANTED
episode.saveToDB()
- best_result = search.searchProviders(show, episode.episode, force_search)
+ best_result = search.search_providers(show, episode.episode, force_search)
if not best_result:
assert cur_data["b"] == best_result