author    | morpheus65535 <[email protected]> | 2023-12-28 14:52:35 -0500
committer | morpheus65535 <[email protected]> | 2023-12-28 14:52:35 -0500
commit    | 9379d1b7f843e46b16483a0d54a17b4453b21590 (patch)
tree      | e1152377d182b0a12547c282d8a273680b4069a4 /libs
parent    | 965b0bcc7958335a87d4dc0293cf9b6313582384 (diff)
download  | bazarr-9379d1b7f843e46b16483a0d54a17b4453b21590.tar.gz
          | bazarr-9379d1b7f843e46b16483a0d54a17b4453b21590.zip
Increased the supersubtitles connection timeout and added proper handling of invalid JSON returned from their API.
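For context, the change follows a common requests idiom: raise the connection timeout and wrap the response's json() call, since JSONDecodeError is raised whenever the body is not valid JSON (for example, when the provider returns an HTML error page). Below is a minimal, hypothetical sketch of that pattern; fetch_autocomplete is an illustrative helper rather than an actual provider method, and the import path for JSONDecodeError assumes requests >= 2.27.

```python
import logging

import requests
from requests.exceptions import JSONDecodeError  # available since requests 2.27

logger = logging.getLogger(__name__)
session = requests.Session()


def fetch_autocomplete(term, server_url="https://www.feliratok.eu/"):
    """Hypothetical helper showing the commit's pattern: a 30-second
    timeout and a guarded .json() call that tolerates a non-JSON body."""
    url = server_url + "index.php?term=" + term + "&nyelv=0&action=autoname"
    r = session.get(url, timeout=30)
    try:
        return r.json()
    except JSONDecodeError:
        logger.error('Unable to parse returned JSON from URL %s', url)
        return None
```

Returning None instead of letting the exception propagate mirrors the series-lookup change in the patch below, so a malformed response degrades to "no results" rather than an unhandled exception.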
Diffstat (limited to 'libs')
-rw-r--r-- | libs/subliminal_patch/providers/supersubtitles.py | 16
1 file changed, 10 insertions, 6 deletions
diff --git a/libs/subliminal_patch/providers/supersubtitles.py b/libs/subliminal_patch/providers/supersubtitles.py
index 80fc276c0..c3ecb06a3 100644
--- a/libs/subliminal_patch/providers/supersubtitles.py
+++ b/libs/subliminal_patch/providers/supersubtitles.py
@@ -189,7 +189,7 @@ class SuperSubtitlesProvider(Provider, ProviderSubtitleArchiveMixin):
         url = self.server_url + "index.php?tipus=adatlap&azon=a_" + str(sub_id)
         # url = https://www.feliratok.eu/index.php?tipus=adatlap&azon=a_1518600916
         logger.info('Get IMDB id from URL %s', url)
-        r = self.session.get(url, timeout=10).content
+        r = self.session.get(url, timeout=30).content

         soup = ParserBeautifulSoup(r, ['lxml'])
         links = soup.find_all("a")
@@ -220,13 +220,17 @@ class SuperSubtitlesProvider(Provider, ProviderSubtitleArchiveMixin):
         url = self.server_url + "index.php?term=" + series + "&nyelv=0&action=autoname"
         # url = self.server_url + "index.php?term=" + "fla"+ "&nyelv=0&action=autoname"
         logger.info('Get series id from URL %s', url)
-        r = self.session.get(url, timeout=10)
+        r = self.session.get(url, timeout=30)

         # r is something like this:
         # [{"name":"DC\u2019s Legends of Tomorrow (2016)","ID":"3725"},{"name":"Miles from Tomorrowland (2015)",
         # "ID":"3789"},{"name":"No Tomorrow (2016)","ID":"4179"}]

-        results = r.json()
+        try:
+            results = r.json()
+        except JSONDecodeError:
+            logger.error('Unable to parse returned JSON from URL %s', url)
+            return None

         # check all of the results:
         for result in results:
@@ -374,7 +378,7 @@ class SuperSubtitlesProvider(Provider, ProviderSubtitleArchiveMixin):
             url += "&rtol=" + str(episode)

         try:
-            results = self.session.get(url, timeout=10).json()
+            results = self.session.get(url, timeout=30).json()
         except JSONDecodeError:
             # provider returned improper JSON
             results = None
@@ -447,7 +451,7 @@ class SuperSubtitlesProvider(Provider, ProviderSubtitleArchiveMixin):
         subtitles = []
         logger.info('URL for subtitles %s', url)

-        r = self.session.get(url, timeout=10).content
+        r = self.session.get(url, timeout=30).content

         soup = ParserBeautifulSoup(r, ['lxml'])
         tables = soup.find_all("table")
@@ -537,7 +541,7 @@ class SuperSubtitlesProvider(Provider, ProviderSubtitleArchiveMixin):
         return subtitles

     def download_subtitle(self, subtitle):
-        r = self.session.get(subtitle.page_link, timeout=10)
+        r = self.session.get(subtitle.page_link, timeout=30)
         r.raise_for_status()

         archive = get_archive_from_bytes(r.content)