# path: libs/subliminal_patch/providers/opensubtitlescom.py
# -*- coding: utf-8 -*-
import logging
import os
import time
import datetime

from requests import Session, ConnectionError, Timeout, ReadTimeout, RequestException
from requests.exceptions import JSONDecodeError
from subzero.language import Language

from babelfish import language_converters
from subliminal import Episode, Movie
from subliminal.score import get_equivalent_release_groups
from subliminal.utils import sanitize_release_group, sanitize
from subliminal_patch.exceptions import TooManyRequests, APIThrottled
from subliminal.exceptions import DownloadLimitExceeded, AuthenticationError, ConfigurationError, ServiceUnavailable, \
    ProviderError
from .mixins import ProviderRetryMixin
from subliminal_patch.subtitle import Subtitle
from subliminal.subtitle import fix_line_ending, SUBTITLE_EXTENSIONS
from subliminal_patch.providers import Provider
from subliminal_patch.subtitle import guess_matches
from subliminal_patch.utils import fix_inconsistent_naming
from subliminal.cache import region
from dogpile.cache.api import NO_VALUE
from guessit import guessit

logger = logging.getLogger(__name__)

SHOW_EXPIRATION_TIME = datetime.timedelta(weeks=1).total_seconds()
TOKEN_EXPIRATION_TIME = datetime.timedelta(hours=12).total_seconds()

retry_amount = 3


def fix_tv_naming(title):
    """Fix TV show titles with inconsistent naming using a dictionary, but do not sanitize them.

    :param str title: original title.
    :return: new title.
    :rtype: str

    """
    return fix_inconsistent_naming(title, {"Superman & Lois": "Superman and Lois",
                                           }, True)


def fix_movie_naming(title):
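    """Fix movie titles with inconsistent naming using a dictionary (currently no overrides), without sanitizing them."""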
    return fix_inconsistent_naming(title, {
    }, True)


custom_languages = {
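    # language codes the OpenSubtitles.com API expects with a region subtag rather than the bare alpha-2 code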
    'pt': 'pt-PT',
    'zh': 'zh-CN',
}


def to_opensubtitlescom(lang):
    return custom_languages.get(lang, lang)


def from_opensubtitlescom(lang):
    from_custom_languages = {v: k for k, v in custom_languages.items()}
    return from_custom_languages.get(lang, lang)


class OpenSubtitlesComSubtitle(Subtitle):
    provider_name = 'opensubtitlescom'
    hash_verifiable = False

    def __init__(self, language, forced, hearing_impaired, page_link, file_id, releases, uploader, title, year,
                 hash_matched, file_hash=None, season=None, episode=None, imdb_match=False):
        super().__init__(language, hearing_impaired, page_link)
        language = Language.rebuild(language, hi=hearing_impaired, forced=forced)

        self.title = title
        self.year = year
        self.season = season
        self.episode = episode
        self.releases = releases
        self.release_info = releases
        self.language = language
        self.hearing_impaired = hearing_impaired
        self.forced = forced
        self.file_id = file_id
        self.page_link = page_link
        self.download_link = None
        self.uploader = uploader
        self.matches = None
        self.hash = file_hash
        self.encoding = 'utf-8'
        self.hash_matched = hash_matched
        self.imdb_match = imdb_match

    @property
    def id(self):
        return self.file_id

    def get_matches(self, video):
        matches = set()
        type_ = "movie" if isinstance(video, Movie) else "episode"

        # handle movies and series separately
        if type_ == "episode":
            # series
            matches.add('series')
            # season
            if video.season == self.season:
                matches.add('season')
            # episode
            if video.episode == self.episode:
                matches.add('episode')
            # imdb
            if self.imdb_match:
                matches.add('series_imdb_id')
        else:
            # title
            matches.add('title')
            # imdb
            if self.imdb_match:
                matches.add('imdb_id')

        # rest is same for both groups

        # year
        if video.year == self.year:
            matches.add('year')

        # release_group
        if (video.release_group and self.releases and
                any(r in sanitize_release_group(self.releases)
                    for r in get_equivalent_release_groups(sanitize_release_group(video.release_group)))):
            matches.add('release_group')

        if self.hash_matched:
            matches.add('hash')

        # other properties
        matches |= guess_matches(video, guessit(self.releases, {"type": type_}))

        self.matches = matches

        return matches


class OpenSubtitlesComProvider(ProviderRetryMixin, Provider):
    """OpenSubtitlesCom Provider"""
    server_url = 'https://api.opensubtitles.com/api/v1/'

    languages = {Language.fromopensubtitles(lang) for lang in language_converters['szopensubtitles'].codes}
    languages.update(set(Language.rebuild(lang, forced=True) for lang in languages))
    languages.update(set(Language.rebuild(l, hi=True) for l in languages))

    video_types = (Episode, Movie)

    def __init__(self, username=None, password=None, use_hash=True, api_key=None):
        if not all((username, password)):
            raise ConfigurationError('Username and password must be specified')

        if not api_key:
            raise ConfigurationError('Api_key must be specified')

        self.session = Session()
        self.session.headers = {'User-Agent': os.environ.get("SZ_USER_AGENT", "Sub-Zero/2"),
                                'Api-Key': api_key,
                                'Content-Type': 'application/json'}
        self.token = None
        self.username = username
        self.password = password
        self.video = None
        self.use_hash = use_hash
        self._started = None

    def initialize(self):
        self._started = time.time()
        self.login()

    def terminate(self):
        self.session.close()

    def ping(self):
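        # the session is considered alive only while the login token is within its assumed 12 hour lifetime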
        return self._started and (time.time() - self._started) < TOKEN_EXPIRATION_TIME

    def login(self):
        r = self.retry(
            lambda: checked(
                lambda: self.session.post(self.server_url + 'login',
                                          json={"username": self.username, "password": self.password},
                                          allow_redirects=False,
                                          timeout=30),
                validate_json=True,
                json_key_name='token'
            ),
            amount=retry_amount
        )

        self.token = r.json()['token']
        region.set("oscom_token", self.token)
        return

    @staticmethod
    def sanitize_external_ids(external_id):
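        # external ids may arrive as strings such as 'tt0123456'; strip the 'tt' prefix and the
        # leading zeros so the API receives a plain integer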
        if isinstance(external_id, str):
            external_id = external_id.lower().lstrip('tt').lstrip('0')
        sanitized_id = external_id[:-1].lstrip('0') + external_id[-1]
        return int(sanitized_id)

    @region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME)
    def search_titles(self, title):
        title_id = None

        parameters = {'query': title.lower()}
        logger.debug(f'Searching using this title: {title}')

        results = self.retry(
            lambda: checked(
                lambda: self.session.get(self.server_url + 'features', params=parameters, timeout=30),
                validate_token=True,
                validate_json=True,
                json_key_name='data'
            ),
            amount=retry_amount
        )

        if results == 401:
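            # checked() returns the bare status code 401 when validate_token is set and the token was rejected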
            logger.debug('Authentication failed: clearing the cached token and logging in again.')
            region.delete("oscom_token")
            self.login()

            results = self.retry(
                lambda: checked(
                    lambda: self.session.get(self.server_url + 'features', params=parameters, timeout=30),
                    validate_json=True,
                    json_key_name='data'
                ),
                amount=retry_amount
            )

        # deserialize results
        results_dict = results.json()['data']

        # loop over results
        for result in results_dict:
            if 'title' not in result['attributes']:
                continue
            if isinstance(self.video, Episode):
                if fix_tv_naming(title).lower() == result['attributes']['title'].lower() and \
                        (not self.video.year or self.video.year == int(result['attributes']['year'])):
                    title_id = result['id']
                    break
            else:
                if fix_movie_naming(title).lower() == result['attributes']['title'].lower() and \
                        (not self.video.year or self.video.year == int(result['attributes']['year'])):
                    title_id = result['id']
                    break

        if title_id:
            logger.debug(f'Found this title ID: {title_id}')
            return self.sanitize_external_ids(title_id)

        logger.debug(f'No match found for {title}')

    def query(self, languages, video):
        if region.get("oscom_token", expiration_time=TOKEN_EXPIRATION_TIME) is NO_VALUE:
            logger.debug("No cached token, we'll try to log in again.")
            self.login()
        self.video = video
        if self.use_hash:
            file_hash = self.video.hashes.get('opensubtitlescom')
            logger.debug(f'Searching using this hash: {file_hash}')
        else:
            file_hash = None

        if isinstance(self.video, Episode):
            title = self.video.series
        else:
            title = self.video.title

        imdb_id = None
        if isinstance(self.video, Episode) and self.video.series_imdb_id:
            imdb_id = self.sanitize_external_ids(self.video.series_imdb_id)
        elif isinstance(self.video, Movie) and self.video.imdb_id:
            imdb_id = self.sanitize_external_ids(self.video.imdb_id)

        title_id = None
        if not imdb_id:
            title_id = self.search_titles(title)
            if not title_id:
                return []

        lang_strings = [to_opensubtitlescom(lang.basename) for lang in languages]
        langs = ','.join(lang_strings)
        logger.debug(f'Searching for these languages: {lang_strings}')

        # query the server
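        # parameters are passed as an alphabetically ordered tuple of tuples; requests drops any
        # parameter whose value is None, so only one of imdb_id and the feature/title id is sent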
        if isinstance(self.video, Episode):
            res = self.retry(
                lambda: checked(
                    lambda: self.session.get(self.server_url + 'subtitles',
                                             params=(('ai_translated', 'exclude'),
                                                     ('episode_number', self.video.episode),
                                                     ('imdb_id', imdb_id if not title_id else None),
                                                     ('languages', langs.lower()),
                                                     ('machine_translated', 'exclude'),
                                                     ('moviehash', file_hash),
                                                     ('parent_feature_id', title_id if title_id else None),
                                                     ('season_number', self.video.season)),
                                             timeout=30),
                    validate_json=True,
                    json_key_name='data'
                ),
                amount=retry_amount
            )
        else:
            res = self.retry(
                lambda: checked(
                    lambda: self.session.get(self.server_url + 'subtitles',
                                             params=(('ai_translated', 'exclude'),
                                                     ('id', title_id if title_id else None),
                                                     ('imdb_id', imdb_id if not title_id else None),
                                                     ('languages', langs.lower()),
                                                     ('machine_translated', 'exclude'),
                                                     ('moviehash', file_hash)),
                                             timeout=30),
                    validate_json=True,
                    json_key_name='data'
                ),
                amount=retry_amount
            )

        subtitles = []

        result = res.json()

        # filter out forced subtitles or not depending on the required languages
        if all([lang.forced for lang in languages]):  # only forced
            result['data'] = [x for x in result['data'] if x['attributes']['foreign_parts_only']]
        elif any([lang.forced for lang in languages]):  # also forced
            pass
        else:  # not forced
            result['data'] = [x for x in result['data'] if not x['attributes']['foreign_parts_only']]

        logger.debug(f"Query returned {len(result['data'])} subtitles")

        if result['data']:
            for item in result['data']:
                # ignore AI translated subtitles
                if item['attributes'].get('ai_translated'):
                    logger.debug("Skipping AI translated subtitles")
                    continue

                # ignore machine translated subtitles
                if item['attributes'].get('machine_translated'):
                    logger.debug("Skipping machine translated subtitles")
                    continue

                season_number = item['attributes']['feature_details'].get('season_number')
                episode_number = item['attributes']['feature_details'].get('episode_number')
                moviehash_match = item['attributes'].get('moviehash_match', False)

                # the year may come back as a string or None; keep the raw value when it cannot be cast
                try:
                    year = int(item['attributes']['feature_details']['year'])
                except (TypeError, ValueError):
                    year = item['attributes']['feature_details']['year']

                if len(item['attributes']['files']):
                    subtitle = OpenSubtitlesComSubtitle(
                        language=Language.fromietf(from_opensubtitlescom(item['attributes']['language'])),
                        forced=item['attributes']['foreign_parts_only'],
                        hearing_impaired=item['attributes']['hearing_impaired'],
                        page_link=item['attributes']['url'],
                        file_id=item['attributes']['files'][0]['file_id'],
                        releases=item['attributes']['release'],
                        uploader=item['attributes']['uploader']['name'],
                        title=item['attributes']['feature_details']['movie_name'],
                        year=year,
                        season=season_number,
                        episode=episode_number,
                        hash_matched=moviehash_match,
                        imdb_match=True if imdb_id else False
                    )
                    subtitle.get_matches(self.video)
                    subtitles.append(subtitle)

        return subtitles

    def list_subtitles(self, video, languages):
        return self.query(languages, video)

    def download_subtitle(self, subtitle):
        if region.get("oscom_token", expiration_time=TOKEN_EXPIRATION_TIME) is NO_VALUE:
            logger.debug("No cached token, we'll try to log in again.")
            self.login()
        if not self.token:
            logger.debug("Unable to obtain an authentication token right now, we'll try again later.")
            raise ProviderError("Unable to obtain an authentication token")

        logger.info('Downloading subtitle %r', subtitle)

        headers = {'Accept': 'application/json', 'Content-Type': 'application/json',
                   'Authorization': 'Bearer ' + self.token}
        res = self.retry(
            lambda: checked(
                lambda: self.session.post(self.server_url + 'download',
                                          json={'file_id': subtitle.file_id, 'sub_format': 'srt'},
                                          headers=headers,
                                          timeout=30),
                validate_json=True,
                json_key_name='link'
            ),
            amount=retry_amount
        )

        download_data = res.json()
        subtitle.download_link = download_data['link']

        r = self.retry(
            lambda: checked(
                lambda: self.session.get(subtitle.download_link, timeout=30),
                validate_content=True
            ),
            amount=retry_amount
        )

        if not r:
            logger.debug(f'Could not download subtitle from {subtitle.download_link}')
            subtitle.content = None
            return

        subtitle.content = fix_line_ending(r.content)


def checked(fn, raise_api_limit=False, validate_token=False, validate_json=False, json_key_name=None,
            validate_content=False):
    """Run :fn: and check the response status before returning it.

    :param fn: the function making the API call to OpenSubtitles.com.
    :param raise_api_limit: if True, re-raise APIThrottled instead of waiting and retrying the call once.
    :param validate_token: if True, return 401 instead of raising when the token is rejected.
    :param validate_json: if True, check that the response body is valid JSON.
    :param json_key_name: check that the returned JSON contains this key.
    :param validate_content: if True, check that the response has content (used for downloads).
    :return: the response.

    """
    response = None
    try:
        try:
            response = fn()
        except APIThrottled:
            if not raise_api_limit:
                logger.info("API request limit hit, waiting and trying again once.")
                time.sleep(2)
                return checked(fn, raise_api_limit=True, validate_token=validate_token,
                               validate_json=validate_json, json_key_name=json_key_name,
                               validate_content=validate_content)
            raise
        except (ConnectionError, Timeout, ReadTimeout) as e:
            raise ServiceUnavailable(f'Connection error or timeout while reaching the provider: {e}')
        except Exception:
            logger.exception('Unhandled exception raised.')
            raise ProviderError('Unhandled exception raised. Check log.')
        else:
            status_code = response.status_code
    except Exception:
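        # any exception raised while performing the request above (including the provider exceptions
        # re-raised in the handlers) lands here; checked() then falls through and returns whatever
        # 'response' holds, which is None when the request itself failed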
        status_code = None
    else:
        if status_code == 400:
            raise ConfigurationError('Login must use the username, not the email address')
        elif status_code == 401:
            time.sleep(1)
            if validate_token:
                return 401
            else:
                log_request_response(response)
                raise AuthenticationError(f'Login failed: {response.reason}')
        elif status_code == 403:
            log_request_response(response)
            raise ProviderError("There seems to be a problem with the Bazarr API key")
        elif status_code == 406:
            try:
                json_response = response.json()
                download_count = json_response['requests']
                remaining_download = json_response['remaining']
                quota_reset_time = json_response['reset_time']
            except JSONDecodeError:
                raise ProviderError('Invalid JSON returned by provider')
            else:
                log_request_response(response)
                raise DownloadLimitExceeded(f"Daily download limit reached. {download_count} subtitles have been "
                                            f"downloaded and {remaining_download} remaining subtitles can be "
                                            f"downloaded. Quota will be reset in {quota_reset_time}.")
        elif status_code == 410:
            log_request_response(response)
            raise ProviderError("Download link has expired")
        elif status_code == 429:
            log_request_response(response)
            raise TooManyRequests()
        elif status_code == 500:
            logger.debug("Server side exception raised while downloading from opensubtitles.com website. They "
                         "should mitigate this soon.")
            return None
        elif status_code == 502:
            # this deals with Bad Gateway issues on their side.
            raise APIThrottled()
        elif 500 <= status_code <= 599:
            raise ProviderError(response.reason)

        if status_code != 200:
            log_request_response(response)
            raise ProviderError(f'Bad status code: {response.status_code}')

        if validate_json:
            try:
                json_test = response.json()
            except JSONDecodeError:
                raise ProviderError('Invalid JSON returned by provider')
            else:
                if json_key_name not in json_test:
                    raise ProviderError(f'Invalid JSON returned by provider: no {json_key_name} key in returned json.')

        if validate_content:
            if not hasattr(response, 'content'):
                logger.error('Download link returned no content attribute.')
                return False
            elif not response.content:
                logger.error(f'This download link returned empty content: {response.url}')
                return False

    return response


def log_request_response(response):
    logger.debug("opensubtitlescom returned a non-standard response. Logging request/response for debugging purposes.")
    logger.debug(f"Request URL: {response.request.url}")
    logger.debug(f"Request Headers: {response.request.headers}")
    logger.debug(f"Request Body: {response.request.body}")
    logger.debug(f"Response Status Code: {response.status_code}")
    logger.debug(f"Response Headers: {response.headers}")
    logger.debug(f"Response Body: {response.text}")
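

# ---------------------------------------------------------------------------
# Illustrative usage sketch (commented out on purpose): roughly how Bazarr /
# subliminal would drive this provider. The username, password, API key and
# file path below are hypothetical placeholders, not values shipped with this
# module.
#
#   from subliminal import Movie
#   from subzero.language import Language
#
#   provider = OpenSubtitlesComProvider(username='myuser', password='mypass',
#                                       api_key='my-api-key', use_hash=False)
#   provider.initialize()                                 # logs in and caches the token
#   try:
#       video = Movie('/path/to/Some.Movie.2020.1080p.mkv', 'Some Movie', year=2020)
#       subtitles = provider.list_subtitles(video, {Language('eng')})
#       if subtitles:
#           provider.download_subtitle(subtitles[0])      # fills subtitles[0].content
#   finally:
#       provider.terminate()
# ---------------------------------------------------------------------------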