author | Louis Vézina <[email protected]> | 2019-08-14 12:46:21 -0400
committer | Louis Vézina <[email protected]> | 2019-08-14 12:46:21 -0400
commit | 2496660cc0874c3a0eb31ec8876dd5eb5086639e (patch)
tree | 77d4958d9810d08f96378e7f06ce24a93fd298ae /libs/apprise
parent | 3a1285e2da451ec0015954ea29c58deb3f08781a (diff)
download | bazarr-2496660cc0874c3a0eb31ec8876dd5eb5086639e.tar.gz, bazarr-2496660cc0874c3a0eb31ec8876dd5eb5086639e.zip
Updating Apprise to 0.7.9 in order to fix #478 and #515.
Diffstat (limited to 'libs/apprise')
28 files changed, 176 insertions, 4636 deletions
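The headline user-facing change in this bump is the Pushover `retry`/`expire` support for emergency-priority alerts (see the NotifyPushover hunk below, which adds both `template_args` entries and the matching `parse_url` handling). The following is a minimal sketch of how those options are expected to be consumed through an Apprise URL under apprise 0.7.9; the `user_key`/`app_token` values are placeholders, and the `priority=emergency` keyword is an assumption about the plugin's priority parsing rather than something confirmed by this diff.

```python
import apprise

apobj = apprise.Apprise()

# retry= and expire= are only honoured for emergency-priority notifications
# per the NotifyPushover changes in this commit: resend every 60 s (minimum 30),
# stop retrying after 3600 s (maximum 10800).
# 'user_key' and 'app_token' are placeholders for your Pushover credentials.
apobj.add('pover://user_key@app_token?priority=emergency&retry=60&expire=3600')

apobj.notify(
    title='Disk space warning',
    body='Volume /data is above 95% usage.',
)
```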
diff --git a/libs/apprise/Apprise.py b/libs/apprise/Apprise.py index ac20f014b..ee199c4b1 100644 --- a/libs/apprise/Apprise.py +++ b/libs/apprise/Apprise.py @@ -212,7 +212,7 @@ class Apprise(object): # Initialize our return status return_status = True - if isinstance(asset, AppriseAsset): + if asset is None: # prepare default asset asset = self.asset diff --git a/libs/apprise/AppriseLocale.py b/libs/apprise/AppriseLocale.py index 714d2016e..714e11804 100644 --- a/libs/apprise/AppriseLocale.py +++ b/libs/apprise/AppriseLocale.py @@ -30,6 +30,7 @@ import contextlib from os.path import join from os.path import dirname from os.path import abspath +from .logger import logger # Define our translation domain DOMAIN = 'apprise' @@ -207,6 +208,16 @@ class AppriseLocale(object): # Detect language lang = locale.getdefaultlocale()[0] + except ValueError as e: + # This occurs when an invalid locale was parsed from the + # environment variable. While we still return None in this + # case, we want to better notify the end user of this. Users + # receiving this error should check their environment + # variables. + logger.warning( + 'Language detection failure / {}'.format(str(e))) + return None + except TypeError: # None is returned if the default can't be determined # we're done in this case diff --git a/libs/apprise/__init__.py b/libs/apprise/__init__.py index 04f53b754..055a2369e 100644 --- a/libs/apprise/__init__.py +++ b/libs/apprise/__init__.py @@ -24,7 +24,7 @@ # THE SOFTWARE. __title__ = 'apprise' -__version__ = '0.7.8' +__version__ = '0.7.9' __author__ = 'Chris Caron' __license__ = 'MIT' __copywrite__ = 'Copyright (C) 2019 Chris Caron <[email protected]>' diff --git a/libs/apprise/i18n/apprise.pot b/libs/apprise/i18n/apprise.pot deleted file mode 100644 index b521b7cbe..000000000 --- a/libs/apprise/i18n/apprise.pot +++ /dev/null @@ -1,292 +0,0 @@ -# Translations template for apprise. -# Copyright (C) 2019 Chris Caron -# This file is distributed under the same license as the apprise project. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2019. 
-# -#, fuzzy -msgid "" -msgstr "" -"Project-Id-Version: apprise 0.7.8\n" -"Report-Msgid-Bugs-To: [email protected]\n" -"POT-Creation-Date: 2019-06-06 12:49-0400\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" -"Language-Team: LANGUAGE <[email protected]>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.7.0\n" - -msgid "API Key" -msgstr "" - -msgid "Access Key" -msgstr "" - -msgid "Access Key ID" -msgstr "" - -msgid "Access Secret" -msgstr "" - -msgid "Access Token" -msgstr "" - -msgid "Account SID" -msgstr "" - -msgid "Add Tokens" -msgstr "" - -msgid "Application Key" -msgstr "" - -msgid "Application Secret" -msgstr "" - -msgid "Auth Token" -msgstr "" - -msgid "Authorization Token" -msgstr "" - -msgid "Avatar Image" -msgstr "" - -msgid "Bot Name" -msgstr "" - -msgid "Bot Token" -msgstr "" - -msgid "Channels" -msgstr "" - -msgid "Consumer Key" -msgstr "" - -msgid "Consumer Secret" -msgstr "" - -msgid "Detect Bot Owner" -msgstr "" - -msgid "Device ID" -msgstr "" - -msgid "Display Footer" -msgstr "" - -msgid "Domain" -msgstr "" - -msgid "Duration" -msgstr "" - -msgid "Events" -msgstr "" - -msgid "Footer Logo" -msgstr "" - -msgid "From Email" -msgstr "" - -msgid "From Name" -msgstr "" - -msgid "From Phone No" -msgstr "" - -msgid "Group" -msgstr "" - -msgid "HTTP Header" -msgstr "" - -msgid "Hostname" -msgstr "" - -msgid "Include Image" -msgstr "" - -msgid "Modal" -msgstr "" - -msgid "Notify Format" -msgstr "" - -msgid "Organization" -msgstr "" - -msgid "Overflow Mode" -msgstr "" - -msgid "Password" -msgstr "" - -msgid "Path" -msgstr "" - -msgid "Port" -msgstr "" - -msgid "Priority" -msgstr "" - -msgid "Provider Key" -msgstr "" - -msgid "Region" -msgstr "" - -msgid "Region Name" -msgstr "" - -msgid "Remove Tokens" -msgstr "" - -msgid "Rooms" -msgstr "" - -msgid "SMTP Server" -msgstr "" - -msgid "Schema" -msgstr "" - -msgid "Secret Access Key" -msgstr "" - -msgid "Secret Key" -msgstr "" - -msgid "Secure Mode" -msgstr "" - -msgid "Server Timeout" -msgstr "" - -msgid "Sound" -msgstr "" - -msgid "Source JID" -msgstr "" - -msgid "Target Channel" -msgstr "" - -msgid "Target Chat ID" -msgstr "" - -msgid "Target Device" -msgstr "" - -msgid "Target Device ID" -msgstr "" - -msgid "Target Email" -msgstr "" - -msgid "Target Emails" -msgstr "" - -msgid "Target Encoded ID" -msgstr "" - -msgid "Target JID" -msgstr "" - -msgid "Target Phone No" -msgstr "" - -msgid "Target Room Alias" -msgstr "" - -msgid "Target Room ID" -msgstr "" - -msgid "Target Short Code" -msgstr "" - -msgid "Target Tag ID" -msgstr "" - -msgid "Target Topic" -msgstr "" - -msgid "Target User" -msgstr "" - -msgid "Targets" -msgstr "" - -msgid "Text To Speech" -msgstr "" - -msgid "To Channel ID" -msgstr "" - -msgid "To Email" -msgstr "" - -msgid "To User ID" -msgstr "" - -msgid "Token" -msgstr "" - -msgid "Token A" -msgstr "" - -msgid "Token B" -msgstr "" - -msgid "Token C" -msgstr "" - -msgid "Urgency" -msgstr "" - -msgid "Use Avatar" -msgstr "" - -msgid "User" -msgstr "" - -msgid "User Key" -msgstr "" - -msgid "User Name" -msgstr "" - -msgid "Username" -msgstr "" - -msgid "Verify SSL" -msgstr "" - -msgid "Version" -msgstr "" - -msgid "Webhook" -msgstr "" - -msgid "Webhook ID" -msgstr "" - -msgid "Webhook Mode" -msgstr "" - -msgid "Webhook Token" -msgstr "" - -msgid "X-Axis" -msgstr "" - -msgid "XEP" -msgstr "" - -msgid "Y-Axis" -msgstr "" - diff --git a/libs/apprise/i18n/en/LC_MESSAGES/apprise.po 
b/libs/apprise/i18n/en/LC_MESSAGES/apprise.po deleted file mode 100644 index 44451262c..000000000 --- a/libs/apprise/i18n/en/LC_MESSAGES/apprise.po +++ /dev/null @@ -1,293 +0,0 @@ -# English translations for apprise. -# Copyright (C) 2019 Chris Caron -# This file is distributed under the same license as the apprise project. -# Chris Caron <[email protected]>, 2019. -# -msgid "" -msgstr "" -"Project-Id-Version: apprise 0.7.6\n" -"Report-Msgid-Bugs-To: [email protected]\n" -"POT-Creation-Date: 2019-05-28 16:56-0400\n" -"PO-Revision-Date: 2019-05-24 20:00-0400\n" -"Last-Translator: Chris Caron <[email protected]>\n" -"Language: en\n" -"Language-Team: en <[email protected]>\n" -"Plural-Forms: nplurals=2; plural=(n != 1)\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.6.0\n" - -msgid "API Key" -msgstr "" - -msgid "Access Key" -msgstr "" - -msgid "Access Key ID" -msgstr "" - -msgid "Access Secret" -msgstr "" - -msgid "Access Token" -msgstr "" - -msgid "Account SID" -msgstr "" - -msgid "Add Tokens" -msgstr "" - -msgid "Application Key" -msgstr "" - -msgid "Application Secret" -msgstr "" - -msgid "Auth Token" -msgstr "" - -msgid "Authorization Token" -msgstr "" - -msgid "Avatar Image" -msgstr "" - -msgid "Bot Name" -msgstr "" - -msgid "Bot Token" -msgstr "" - -msgid "Channels" -msgstr "" - -msgid "Consumer Key" -msgstr "" - -msgid "Consumer Secret" -msgstr "" - -msgid "Detect Bot Owner" -msgstr "" - -msgid "Device ID" -msgstr "" - -msgid "Display Footer" -msgstr "" - -msgid "Domain" -msgstr "" - -msgid "Duration" -msgstr "" - -msgid "Events" -msgstr "" - -msgid "Footer Logo" -msgstr "" - -msgid "From Email" -msgstr "" - -msgid "From Name" -msgstr "" - -msgid "From Phone No" -msgstr "" - -msgid "Group" -msgstr "" - -msgid "HTTP Header" -msgstr "" - -msgid "Hostname" -msgstr "" - -msgid "Include Image" -msgstr "" - -msgid "Modal" -msgstr "" - -msgid "Notify Format" -msgstr "" - -msgid "Organization" -msgstr "" - -msgid "Overflow Mode" -msgstr "" - -msgid "Password" -msgstr "" - -msgid "Port" -msgstr "" - -msgid "Priority" -msgstr "" - -msgid "Provider Key" -msgstr "" - -msgid "Region" -msgstr "" - -msgid "Region Name" -msgstr "" - -msgid "Remove Tokens" -msgstr "" - -msgid "Rooms" -msgstr "" - -msgid "SMTP Server" -msgstr "" - -msgid "Schema" -msgstr "" - -msgid "Secret Access Key" -msgstr "" - -msgid "Secret Key" -msgstr "" - -msgid "Secure Mode" -msgstr "" - -msgid "Server Timeout" -msgstr "" - -msgid "Sound" -msgstr "" - -msgid "Source JID" -msgstr "" - -msgid "Target Channel" -msgstr "" - -msgid "Target Chat ID" -msgstr "" - -msgid "Target Device" -msgstr "" - -msgid "Target Device ID" -msgstr "" - -msgid "Target Email" -msgstr "" - -msgid "Target Emails" -msgstr "" - -msgid "Target Encoded ID" -msgstr "" - -msgid "Target JID" -msgstr "" - -msgid "Target Phone No" -msgstr "" - -msgid "Target Room Alias" -msgstr "" - -msgid "Target Room ID" -msgstr "" - -msgid "Target Short Code" -msgstr "" - -msgid "Target Tag ID" -msgstr "" - -msgid "Target Topic" -msgstr "" - -msgid "Target User" -msgstr "" - -msgid "Targets" -msgstr "" - -msgid "Text To Speech" -msgstr "" - -msgid "To Channel ID" -msgstr "" - -msgid "To Email" -msgstr "" - -msgid "To User ID" -msgstr "" - -msgid "Token" -msgstr "" - -msgid "Token A" -msgstr "" - -msgid "Token B" -msgstr "" - -msgid "Token C" -msgstr "" - -msgid "Urgency" -msgstr "" - -msgid "Use Avatar" -msgstr "" - -msgid "User" -msgstr "" - -msgid "User Key" -msgstr "" - -msgid "User Name" 
-msgstr "" - -msgid "Username" -msgstr "" - -msgid "Verify SSL" -msgstr "" - -msgid "Version" -msgstr "" - -msgid "Webhook" -msgstr "" - -msgid "Webhook ID" -msgstr "" - -msgid "Webhook Mode" -msgstr "" - -msgid "Webhook Token" -msgstr "" - -msgid "X-Axis" -msgstr "" - -msgid "XEP" -msgstr "" - -msgid "Y-Axis" -msgstr "" - -#~ msgid "Access Key Secret" -#~ msgstr "" - diff --git a/libs/apprise/plugins/NotifyEmby.py b/libs/apprise/plugins/NotifyEmby.py index fd791507b..82ac7da26 100644 --- a/libs/apprise/plugins/NotifyEmby.py +++ b/libs/apprise/plugins/NotifyEmby.py @@ -669,4 +669,21 @@ class NotifyEmby(NotifyBase): """ Deconstructor """ - self.logout() + try: + self.logout() + + except LookupError: + # Python v3.5 call to requests can sometimes throw the exception + # "/usr/lib64/python3.7/socket.py", line 748, in getaddrinfo + # LookupError: unknown encoding: idna + # + # This occurs every time when running unit-tests against Apprise: + # LANG=C.UTF-8 PYTHONPATH=$(pwd) py.test-3.7 + # + # There has been an open issue on this since Jan 2017. + # - https://bugs.python.org/issue29288 + # + # A ~similar~ issue can be identified here in the requests + # ticket system as unresolved and has provided work-arounds + # - https://github.com/kennethreitz/requests/issues/3578 + pass diff --git a/libs/apprise/plugins/NotifyFlock.py b/libs/apprise/plugins/NotifyFlock.py index 96ea52b4d..b3e65672a 100644 --- a/libs/apprise/plugins/NotifyFlock.py +++ b/libs/apprise/plugins/NotifyFlock.py @@ -55,9 +55,6 @@ FLOCK_HTTP_ERROR_MAP = { 401: 'Unauthorized - Invalid Token.', } -# Default User -FLOCK_DEFAULT_USER = 'apprise' - # Used to detect a channel/user IS_CHANNEL_RE = re.compile(r'^(#|g:)(?P<id>[A-Z0-9_]{12})$', re.I) IS_USER_RE = re.compile(r'^(@|u:)?(?P<id>[A-Z0-9_]{12})$', re.I) @@ -223,7 +220,7 @@ class NotifyFlock(NotifyBase): 'token': self.token, 'flockml': body, 'sendAs': { - 'name': FLOCK_DEFAULT_USER if not self.user else self.user, + 'name': self.app_id if not self.user else self.user, # A Profile Image is only configured if we're configured to # allow it 'profileImage': None if not self.include_image diff --git a/libs/apprise/plugins/NotifyGitter.py b/libs/apprise/plugins/NotifyGitter.py index 8f187c128..47f69a078 100644 --- a/libs/apprise/plugins/NotifyGitter.py +++ b/libs/apprise/plugins/NotifyGitter.py @@ -261,7 +261,7 @@ class NotifyGitter(NotifyBase): def _fetch(self, url, payload=None, method='GET'): """ - Wrapper to POST + Wrapper to request object """ @@ -305,11 +305,7 @@ class NotifyGitter(NotifyBase): # period. wait = (self.ratelimit_reset - now).total_seconds() + 0.5 - # Always call throttle before any remote server i/o is made; for AWS - # time plays a huge factor in the headers being sent with the payload. - # So for AWS (SNS) requests we must throttle before they're generated - # and not directly before the i/o call like other notification - # services do. + # Always call throttle before any remote server i/o is made self.throttle(wait=wait) # fetch function @@ -328,8 +324,9 @@ class NotifyGitter(NotifyBase): NotifyGitter.http_response_code_lookup(r.status_code) self.logger.warning( - 'Failed to send Gitter POST to {}: ' + 'Failed to send Gitter {} to {}: ' '{}error={}.'.format( + method, url, ', ' if status_str else '', r.status_code)) @@ -361,8 +358,8 @@ class NotifyGitter(NotifyBase): except requests.RequestException as e: self.logger.warning( - 'Exception received when sending Gitter POST to {}: '. - format(url)) + 'Exception received when sending Gitter {} to {}: '. 
+ format(method, url)) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure diff --git a/libs/apprise/plugins/NotifyJoin.py b/libs/apprise/plugins/NotifyJoin.py index b9b80f2ad..864c7c162 100644 --- a/libs/apprise/plugins/NotifyJoin.py +++ b/libs/apprise/plugins/NotifyJoin.py @@ -88,10 +88,6 @@ class NotifyJoin(NotifyBase): # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_72 - # Limit results to just the first 2 line otherwise there is just to much - # content to display - body_max_line_count = 2 - # The maximum allowable characters allowed in the body per message body_maxlen = 1000 diff --git a/libs/apprise/plugins/NotifyMailgun.py b/libs/apprise/plugins/NotifyMailgun.py index ad0d5f91d..b53982f63 100644 --- a/libs/apprise/plugins/NotifyMailgun.py +++ b/libs/apprise/plugins/NotifyMailgun.py @@ -51,7 +51,6 @@ # the email will be transmitted from. If no email address is specified # then it will also become the 'to' address as well. # -import re import requests from .NotifyBase import NotifyBase @@ -60,9 +59,6 @@ from ..utils import parse_list from ..utils import is_email from ..AppriseLocale import gettext_lazy as _ -# Used to validate your personal access apikey -VALIDATE_API_KEY = re.compile(r'^[a-z0-9]{32}-[a-z0-9]{8}-[a-z0-9]{8}$', re.I) - # Provide some known codes Mailgun uses and what they translate to: # Based on https://documentation.mailgun.com/en/latest/api-intro.html#errors MAILGUN_HTTP_ERROR_MAP = { @@ -138,7 +134,6 @@ class NotifyMailgun(NotifyBase): 'apikey': { 'name': _('API Key'), 'type': 'string', - 'regex': (r'[a-z0-9]{32}-[a-z0-9]{8}-[a-z0-9]{8}', 'i'), 'private': True, 'required': True, }, @@ -184,12 +179,6 @@ class NotifyMailgun(NotifyBase): self.logger.warning(msg) raise TypeError(msg) - if not VALIDATE_API_KEY.match(self.apikey): - msg = 'The API Key specified ({}) is invalid.' \ - .format(apikey) - self.logger.warning(msg) - raise TypeError(msg) - # Validate our username if not self.user: msg = 'No username was specified.' 
diff --git a/libs/apprise/plugins/NotifyMatrix.py b/libs/apprise/plugins/NotifyMatrix.py index 7ede0799e..a5579e538 100644 --- a/libs/apprise/plugins/NotifyMatrix.py +++ b/libs/apprise/plugins/NotifyMatrix.py @@ -62,9 +62,6 @@ IS_ROOM_ID = re.compile( r'^\s*(!|!|%21)(?P<room>[a-z0-9-]+)((:|%3A)' r'(?P<home_server>[a-z0-9.-]+))?\s*$', re.I) -# Default User -SLACK_DEFAULT_USER = 'apprise' - class MatrixWebhookMode(object): # Webhook Mode is disabled @@ -265,13 +262,13 @@ class NotifyMatrix(NotifyBase): default_port = 443 if self.secure else 80 # Prepare our URL - url = '{schema}://{hostname}:{port}/{token}{webhook_path}'.format( + url = '{schema}://{hostname}:{port}/{webhook_path}/{token}'.format( schema='https' if self.secure else 'http', hostname=self.host, port='' if self.port is None or self.port == default_port else self.port, - token=access_token, webhook_path=MATRIX_V1_WEBHOOK_PATH, + token=access_token, ) # Retrieve our payload @@ -360,7 +357,7 @@ class NotifyMatrix(NotifyBase): # prepare JSON Object payload = { - 'username': self.user if self.user else SLACK_DEFAULT_USER, + 'username': self.user if self.user else self.app_id, # Use Markdown language 'mrkdwn': (self.notify_format == NotifyFormat.MARKDOWN), 'attachments': [{ diff --git a/libs/apprise/plugins/NotifyPushover.py b/libs/apprise/plugins/NotifyPushover.py index 3dca24408..ebc77ae04 100644 --- a/libs/apprise/plugins/NotifyPushover.py +++ b/libs/apprise/plugins/NotifyPushover.py @@ -194,12 +194,26 @@ class NotifyPushover(NotifyBase): 'regex': (r'[a-z]{1,12}', 'i'), 'default': PushoverSound.PUSHOVER, }, + 'retry': { + 'name': _('Retry'), + 'type': 'int', + 'min': 30, + 'default': 900, # 15 minutes + }, + 'expire': { + 'name': _('Expire'), + 'type': 'int', + 'min': 0, + 'max': 10800, + 'default': 3600, # 1 hour + }, 'to': { 'alias_of': 'targets', }, }) def __init__(self, token, targets=None, priority=None, sound=None, + retry=None, expire=None, **kwargs): """ Initialize Pushover Object @@ -240,6 +254,34 @@ class NotifyPushover(NotifyBase): else: self.priority = priority + # The following are for emergency alerts + if self.priority == PushoverPriority.EMERGENCY: + + # How often to resend notification, in seconds + self.retry = NotifyPushover.template_args['retry']['default'] + try: + self.retry = int(retry) + except (ValueError, TypeError): + # Do nothing + pass + + # How often to resend notification, in seconds + self.expire = NotifyPushover.template_args['expire']['default'] + try: + self.expire = int(expire) + except (ValueError, TypeError): + # Do nothing + pass + + if self.retry < 30: + msg = 'Retry must be at least 30.' + self.logger.warning(msg) + raise TypeError(msg) + if self.expire < 0 or self.expire > 10800: + msg = 'Expire has a max value of at most 10800 seconds.' + self.logger.warning(msg) + raise TypeError(msg) + if not self.user: msg = 'No user key was specified.' 
self.logger.warning(msg) @@ -289,6 +331,9 @@ class NotifyPushover(NotifyBase): 'sound': self.sound, } + if self.priority == PushoverPriority.EMERGENCY: + payload.update({'retry': self.retry, 'expire': self.expire}) + self.logger.debug('Pushover POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) @@ -365,6 +410,10 @@ class NotifyPushover(NotifyBase): else _map[self.priority], 'verify': 'yes' if self.verify_certificate else 'no', } + # Only add expire and retry for emergency messages, + # pushover ignores for all other priorities + if self.priority == PushoverPriority.EMERGENCY: + args.update({'expire': self.expire, 'retry': self.retry}) # Escape our devices devices = '/'.join([NotifyPushover.quote(x, safe='') @@ -422,6 +471,12 @@ class NotifyPushover(NotifyBase): results['sound'] = \ NotifyPushover.unquote(results['qsd']['sound']) + # Get expire and retry + if 'expire' in results['qsd'] and len(results['qsd']['expire']): + results['expire'] = results['qsd']['expire'] + if 'retry' in results['qsd'] and len(results['qsd']['retry']): + results['retry'] = results['qsd']['retry'] + # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ diff --git a/libs/apprise/plugins/NotifySlack.py b/libs/apprise/plugins/NotifySlack.py index 690952ef2..17a7ebbc6 100644 --- a/libs/apprise/plugins/NotifySlack.py +++ b/libs/apprise/plugins/NotifySlack.py @@ -60,9 +60,6 @@ VALIDATE_TOKEN_B = re.compile(r'[A-Z0-9]{9}') # /........./........./CCCCCCCCCCCCCCCCCCCCCCCC VALIDATE_TOKEN_C = re.compile(r'[A-Za-z0-9]{24}') -# Default User -SLACK_DEFAULT_USER = 'apprise' - # Extend HTTP Error Messages SLACK_HTTP_ERROR_MAP = { 401: 'Unauthorized - Invalid Token.', @@ -228,7 +225,7 @@ class NotifySlack(NotifyBase): if not self.user: self.logger.warning( - 'No user was specified; using %s.' % SLACK_DEFAULT_USER) + 'No user was specified; using "%s".' 
% self.app_id) # Build list of channels self.channels = parse_list(targets) @@ -287,7 +284,7 @@ class NotifySlack(NotifyBase): # prepare JSON Object payload = { - 'username': self.user if self.user else SLACK_DEFAULT_USER, + 'username': self.user if self.user else self.app_id, # Use Markdown language 'mrkdwn': (self.notify_format == NotifyFormat.MARKDOWN), 'attachments': [{ diff --git a/libs/apprise/plugins/NotifyTelegram.py b/libs/apprise/plugins/NotifyTelegram.py index a06f31f72..2ce0ddc9f 100644 --- a/libs/apprise/plugins/NotifyTelegram.py +++ b/libs/apprise/plugins/NotifyTelegram.py @@ -52,8 +52,6 @@ import requests import re -from os.path import basename - from json import loads from json import dumps @@ -227,56 +225,70 @@ class NotifyTelegram(NotifyBase): if not path: # No image to send self.logger.debug( - 'Telegram Image does not exist for %s' % (notify_type)) + 'Telegram image does not exist for %s' % (notify_type)) # No need to fail; we may have been configured this way through # the apprise.AssetObject() return True - # Configure file payload (for upload) - files = { - 'photo': (basename(path), open(path), 'rb'), - } + try: + with open(path, 'rb') as f: + # Configure file payload (for upload) + files = { + 'photo': f, + } - payload = { - 'chat_id': chat_id, - } + payload = { + 'chat_id': chat_id, + } - self.logger.debug( - 'Telegram Image POST URL: %s (cert_verify=%r)' % ( - url, self.verify_certificate)) + self.logger.debug( + 'Telegram image POST URL: %s (cert_verify=%r)' % ( + url, self.verify_certificate)) - try: - r = requests.post( - url, - files=files, - data=payload, - verify=self.verify_certificate, - ) + try: + r = requests.post( + url, + files=files, + data=payload, + verify=self.verify_certificate, + ) - if r.status_code != requests.codes.ok: - # We had a problem - status_str = \ - NotifyTelegram.http_response_code_lookup(r.status_code) + if r.status_code != requests.codes.ok: + # We had a problem + status_str = NotifyTelegram\ + .http_response_code_lookup(r.status_code) - self.logger.warning( - 'Failed to send Telegram Image: ' - '{}{}error={}.'.format( - status_str, - ', ' if status_str else '', - r.status_code)) + self.logger.warning( + 'Failed to send Telegram image: ' + '{}{}error={}.'.format( + status_str, + ', ' if status_str else '', + r.status_code)) - self.logger.debug('Response Details:\r\n{}'.format(r.content)) + self.logger.debug( + 'Response Details:\r\n{}'.format(r.content)) - return False + return False - except requests.RequestException as e: - self.logger.warning( - 'A connection error occured posting Telegram Image.') - self.logger.debug('Socket Exception: %s' % str(e)) - return False + except requests.RequestException as e: + self.logger.warning( + 'A connection error occured posting Telegram image.') + self.logger.debug('Socket Exception: %s' % str(e)) + return False + + return True + + except (IOError, OSError): + # IOError is present for backwards compatibility with Python + # versions older then 3.3. >= 3.3 throw OSError now. + + # Could not open and/or read the file; this is not a problem since + # we scan a lot of default paths. 
+ self.logger.error( + 'File can not be opened for read: {}'.format(path)) - return True + return False def detect_bot_owner(self): """ diff --git a/libs/apprise/plugins/NotifyTwitter/__init__.py b/libs/apprise/plugins/NotifyTwitter/__init__.py deleted file mode 100644 index 5b4411db8..000000000 --- a/libs/apprise/plugins/NotifyTwitter/__init__.py +++ /dev/null @@ -1,272 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2019 Chris Caron <[email protected]> -# All rights reserved. -# -# This code is licensed under the MIT License. -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files(the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions : -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. - -from . import tweepy -from ..NotifyBase import NotifyBase -from ...common import NotifyType -from ...utils import parse_list -from ...AppriseLocale import gettext_lazy as _ - - -class NotifyTwitter(NotifyBase): - """ - A wrapper to Twitter Notifications - - """ - - # The default descriptive name associated with the Notification - service_name = 'Twitter' - - # The services URL - service_url = 'https://twitter.com/' - - # The default secure protocol - secure_protocol = 'tweet' - - # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_twitter' - - # The maximum allowable characters allowed in the body per message - # This is used during a Private DM Message Size (not Public Tweets - # which are limited to 240 characters) - body_maxlen = 4096 - - # Twitter does have titles when creating a message - title_maxlen = 0 - - templates = ( - '{schema}://{user}@{ckey}{csecret}/{akey}/{asecret}', - ) - - # Define our template tokens - template_tokens = dict(NotifyBase.template_tokens, **{ - 'ckey': { - 'name': _('Consumer Key'), - 'type': 'string', - 'private': True, - 'required': True, - }, - 'csecret': { - 'name': _('Consumer Secret'), - 'type': 'string', - 'private': True, - 'required': True, - }, - 'akey': { - 'name': _('Access Key'), - 'type': 'string', - 'private': True, - 'required': True, - }, - 'asecret': { - 'name': _('Access Secret'), - 'type': 'string', - 'private': True, - 'required': True, - }, - 'user': { - 'name': _('User'), - 'type': 'string', - 'map_to': 'targets', - }, - 'targets': { - 'name': _('Targets'), - 'type': 'list:string', - }, - }) - - # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'to': { - 'alias_of': 'targets', - }, - }) - - def __init__(self, ckey, csecret, akey, asecret, targets=None, **kwargs): - """ - Initialize Twitter Object - - """ - super(NotifyTwitter, 
self).__init__(**kwargs) - - if not ckey: - msg = 'An invalid Consumer API Key was specified.' - self.logger.warning(msg) - raise TypeError(msg) - - if not csecret: - msg = 'An invalid Consumer Secret API Key was specified.' - self.logger.warning(msg) - raise TypeError(msg) - - if not akey: - msg = 'An invalid Access Token API Key was specified.' - self.logger.warning(msg) - raise TypeError(msg) - - if not asecret: - msg = 'An invalid Access Token Secret API Key was specified.' - self.logger.warning(msg) - raise TypeError(msg) - - # Identify our targets - self.targets = parse_list(targets) - - if len(self.targets) == 0 and not self.user: - msg = 'No user(s) were specified.' - self.logger.warning(msg) - raise TypeError(msg) - - # Store our data - self.ckey = ckey - self.csecret = csecret - self.akey = akey - self.asecret = asecret - - return - - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): - """ - Perform Twitter Notification - """ - - try: - # Attempt to Establish a connection to Twitter - self.auth = tweepy.OAuthHandler(self.ckey, self.csecret) - - # Apply our Access Tokens - self.auth.set_access_token(self.akey, self.asecret) - - except Exception: - self.logger.warning( - 'Twitter authentication failed; ' - 'please verify your configuration.' - ) - return False - - # Get ourselves a list of targets - users = list(self.targets) - if not users: - # notify ourselves - users.append(self.user) - - # Error Tracking - has_error = False - - while len(users) > 0: - # Get our user - user = users.pop(0) - - # Always call throttle before any remote server i/o is made to - # avoid thrashing the remote server and risk being blocked. - self.throttle() - - try: - # Get our API - api = tweepy.API(self.auth) - - # Send our Direct Message - api.send_direct_message(user, text=body) - self.logger.info( - 'Sent Twitter DM notification to {}.'.format(user)) - - except Exception as e: - self.logger.warning( - 'A Connection error occured sending Twitter ' - 'direct message to %s.' % user) - self.logger.debug('Twitter Exception: %s' % str(e)) - - # Track our error - has_error = True - - return not has_error - - def url(self): - """ - Returns the URL built dynamically based on specified arguments. - """ - - # Define any arguments set - args = { - 'format': self.notify_format, - 'overflow': self.overflow_mode, - 'verify': 'yes' if self.verify_certificate else 'no', - } - - if len(self.targets) > 0: - args['to'] = ','.join([NotifyTwitter.quote(x, safe='') - for x in self.targets]) - - return '{schema}://{auth}{ckey}/{csecret}/{akey}/{asecret}' \ - '/?{args}'.format( - auth='' if not self.user else '{user}@'.format( - user=NotifyTwitter.quote(self.user, safe='')), - schema=self.secure_protocol, - ckey=NotifyTwitter.quote(self.ckey, safe=''), - asecret=NotifyTwitter.quote(self.csecret, safe=''), - akey=NotifyTwitter.quote(self.akey, safe=''), - csecret=NotifyTwitter.quote(self.asecret, safe=''), - args=NotifyTwitter.urlencode(args)) - - @staticmethod - def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to substantiate this object. 
- - """ - results = NotifyBase.parse_url(url) - - if not results: - # We're done early as we couldn't load the results - return results - - # Apply our settings now - - # The first token is stored in the hostname - consumer_key = NotifyTwitter.unquote(results['host']) - - # Now fetch the remaining tokens - try: - consumer_secret, access_token_key, access_token_secret = \ - NotifyTwitter.split_path(results['fullpath'])[0:3] - - except (ValueError, AttributeError, IndexError): - # Force some bad values that will get caught - # in parsing later - consumer_secret = None - access_token_key = None - access_token_secret = None - - results['ckey'] = consumer_key - results['csecret'] = consumer_secret - results['akey'] = access_token_key - results['asecret'] = access_token_secret - - # Support the to= allowing one to identify more then one user to tweet - # too - results['targets'] = NotifyTwitter.parse_list(results['qsd'].get('to')) - - return results diff --git a/libs/apprise/plugins/NotifyTwitter/tweepy/__init__.py b/libs/apprise/plugins/NotifyTwitter/tweepy/__init__.py deleted file mode 100644 index 3466d0bc4..000000000 --- a/libs/apprise/plugins/NotifyTwitter/tweepy/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -# Tweepy -# Copyright 2009-2010 Joshua Roesslein -# See LICENSE for details. - -""" -Tweepy Twitter API library -""" -__version__ = '3.6.0' -__author__ = 'Joshua Roesslein' -__license__ = 'MIT' - -from .models import Status, User, DirectMessage, Friendship, SavedSearch, SearchResults, ModelFactory, Category -from .error import TweepError, RateLimitError -from .api import API -from .cache import Cache, MemoryCache, FileCache -from .auth import OAuthHandler, AppAuthHandler -from .streaming import Stream, StreamListener -from .cursor import Cursor - -# Global, unauthenticated instance of API -api = API() - -def debug(enable=True, level=1): - from six.moves.http_client import HTTPConnection - HTTPConnection.debuglevel = level diff --git a/libs/apprise/plugins/NotifyTwitter/tweepy/api.py b/libs/apprise/plugins/NotifyTwitter/tweepy/api.py deleted file mode 100644 index 22696b708..000000000 --- a/libs/apprise/plugins/NotifyTwitter/tweepy/api.py +++ /dev/null @@ -1,1386 +0,0 @@ -# Tweepy -# Copyright 2009-2010 Joshua Roesslein -# See LICENSE for details. 
- -from __future__ import print_function - -import os -import mimetypes - -import six - -from .binder import bind_api -from .error import TweepError -from .parsers import ModelParser, Parser -from .utils import list_to_csv - - -class API(object): - """Twitter API""" - - def __init__(self, auth_handler=None, - host='api.twitter.com', search_host='search.twitter.com', - upload_host='upload.twitter.com', cache=None, api_root='/1.1', - search_root='', upload_root='/1.1', retry_count=0, - retry_delay=0, retry_errors=None, timeout=60, parser=None, - compression=False, wait_on_rate_limit=False, - wait_on_rate_limit_notify=False, proxy=''): - """ Api instance Constructor - - :param auth_handler: - :param host: url of the server of the rest api, default:'api.twitter.com' - :param search_host: url of the search server, default:'search.twitter.com' - :param upload_host: url of the upload server, default:'upload.twitter.com' - :param cache: Cache to query if a GET method is used, default:None - :param api_root: suffix of the api version, default:'/1.1' - :param search_root: suffix of the search version, default:'' - :param upload_root: suffix of the upload version, default:'/1.1' - :param retry_count: number of allowed retries, default:0 - :param retry_delay: delay in second between retries, default:0 - :param retry_errors: default:None - :param timeout: delay before to consider the request as timed out in seconds, default:60 - :param parser: ModelParser instance to parse the responses, default:None - :param compression: If the response is compressed, default:False - :param wait_on_rate_limit: If the api wait when it hits the rate limit, default:False - :param wait_on_rate_limit_notify: If the api print a notification when the rate limit is hit, default:False - :param proxy: Url to use as proxy during the HTTP request, default:'' - - :raise TypeError: If the given parser is not a ModelParser instance. - """ - self.auth = auth_handler - self.host = host - self.search_host = search_host - self.upload_host = upload_host - self.api_root = api_root - self.search_root = search_root - self.upload_root = upload_root - self.cache = cache - self.compression = compression - self.retry_count = retry_count - self.retry_delay = retry_delay - self.retry_errors = retry_errors - self.timeout = timeout - self.wait_on_rate_limit = wait_on_rate_limit - self.wait_on_rate_limit_notify = wait_on_rate_limit_notify - self.parser = parser or ModelParser() - self.proxy = {} - if proxy: - self.proxy['https'] = proxy - - # Attempt to explain more clearly the parser argument requirements - # https://github.com/tweepy/tweepy/issues/421 - # - parser_type = Parser - if not isinstance(self.parser, parser_type): - raise TypeError( - '"parser" argument has to be an instance of "{required}".' 
- ' It is currently a {actual}.'.format( - required=parser_type.__name__, - actual=type(self.parser) - ) - ) - - @property - def home_timeline(self): - """ :reference: https://dev.twitter.com/rest/reference/get/statuses/home_timeline - :allowed_param:'since_id', 'max_id', 'count' - """ - return bind_api( - api=self, - path='/statuses/home_timeline.json', - payload_type='status', payload_list=True, - allowed_param=['since_id', 'max_id', 'count'], - require_auth=True - ) - - def statuses_lookup(self, id_, include_entities=None, - trim_user=None, map_=None, tweet_mode=None): - return self._statuses_lookup(list_to_csv(id_), include_entities, - trim_user, map_, tweet_mode) - - @property - def _statuses_lookup(self): - """ :reference: https://dev.twitter.com/rest/reference/get/statuses/lookup - :allowed_param:'id', 'include_entities', 'trim_user', 'map', 'tweet_mode' - """ - return bind_api( - api=self, - path='/statuses/lookup.json', - payload_type='status', payload_list=True, - allowed_param=['id', 'include_entities', 'trim_user', 'map', 'tweet_mode'], - require_auth=True - ) - - @property - def user_timeline(self): - """ :reference: https://dev.twitter.com/rest/reference/get/statuses/user_timeline - :allowed_param:'id', 'user_id', 'screen_name', 'since_id', 'max_id', 'count', 'include_rts', 'trim_user', 'exclude_replies' - """ - return bind_api( - api=self, - path='/statuses/user_timeline.json', - payload_type='status', payload_list=True, - allowed_param=['id', 'user_id', 'screen_name', 'since_id', - 'max_id', 'count', 'include_rts', 'trim_user', - 'exclude_replies'] - ) - - @property - def mentions_timeline(self): - """ :reference: https://dev.twitter.com/rest/reference/get/statuses/mentions_timeline - :allowed_param:'since_id', 'max_id', 'count' - """ - return bind_api( - api=self, - path='/statuses/mentions_timeline.json', - payload_type='status', payload_list=True, - allowed_param=['since_id', 'max_id', 'count'], - require_auth=True - ) - - @property - def related_results(self): - """ :reference: https://dev.twitter.com/docs/api/1.1/get/related_results/show/%3id.format - :allowed_param:'id' - """ - return bind_api( - api=self, - path='/related_results/show/{id}.json', - payload_type='relation', payload_list=True, - allowed_param=['id'], - require_auth=False - ) - - @property - def retweets_of_me(self): - """ :reference: https://dev.twitter.com/rest/reference/get/statuses/retweets_of_me - :allowed_param:'since_id', 'max_id', 'count' - """ - return bind_api( - api=self, - path='/statuses/retweets_of_me.json', - payload_type='status', payload_list=True, - allowed_param=['since_id', 'max_id', 'count'], - require_auth=True - ) - - @property - def get_status(self): - """ :reference: https://dev.twitter.com/rest/reference/get/statuses/show/%3Aid - :allowed_param:'id' - """ - return bind_api( - api=self, - path='/statuses/show.json', - payload_type='status', - allowed_param=['id'] - ) - - def update_status(self, *args, **kwargs): - """ :reference: https://dev.twitter.com/rest/reference/post/statuses/update - :allowed_param:'status', 'in_reply_to_status_id', 'in_reply_to_status_id_str', 'auto_populate_reply_metadata', 'lat', 'long', 'source', 'place_id', 'display_coordinates', 'media_ids' - """ - post_data = {} - media_ids = kwargs.pop("media_ids", None) - if media_ids is not None: - post_data["media_ids"] = list_to_csv(media_ids) - - return bind_api( - api=self, - path='/statuses/update.json', - method='POST', - payload_type='status', - allowed_param=['status', 'in_reply_to_status_id', 
'in_reply_to_status_id_str', 'auto_populate_reply_metadata', 'lat', 'long', 'source', 'place_id', 'display_coordinates'], - require_auth=True - )(post_data=post_data, *args, **kwargs) - - def media_upload(self, filename, *args, **kwargs): - """ :reference: https://dev.twitter.com/rest/reference/post/media/upload - :allowed_param: - """ - f = kwargs.pop('file', None) - headers, post_data = API._pack_image(filename, 4883, form_field='media', f=f) - kwargs.update({'headers': headers, 'post_data': post_data}) - - return bind_api( - api=self, - path='/media/upload.json', - method='POST', - payload_type='media', - allowed_param=[], - require_auth=True, - upload_api=True - )(*args, **kwargs) - - def update_with_media(self, filename, *args, **kwargs): - """ :reference: https://dev.twitter.com/rest/reference/post/statuses/update_with_media - :allowed_param:'status', 'possibly_sensitive', 'in_reply_to_status_id', 'in_reply_to_status_id_str', 'auto_populate_reply_metadata', 'lat', 'long', 'place_id', 'display_coordinates' - """ - f = kwargs.pop('file', None) - headers, post_data = API._pack_image(filename, 3072, form_field='media[]', f=f) - kwargs.update({'headers': headers, 'post_data': post_data}) - - return bind_api( - api=self, - path='/statuses/update_with_media.json', - method='POST', - payload_type='status', - allowed_param=[ - 'status', 'possibly_sensitive', 'in_reply_to_status_id', 'in_reply_to_status_id_str', - 'auto_populate_reply_metadata', 'lat', 'long', 'place_id', 'display_coordinates' - ], - require_auth=True - )(*args, **kwargs) - - @property - def destroy_status(self): - """ :reference: https://dev.twitter.com/rest/reference/post/statuses/destroy/%3Aid - :allowed_param:'id' - """ - return bind_api( - api=self, - path='/statuses/destroy/{id}.json', - method='POST', - payload_type='status', - allowed_param=['id'], - require_auth=True - ) - - @property - def retweet(self): - """ :reference: https://dev.twitter.com/rest/reference/post/statuses/retweet/%3Aid - :allowed_param:'id' - """ - return bind_api( - api=self, - path='/statuses/retweet/{id}.json', - method='POST', - payload_type='status', - allowed_param=['id'], - require_auth=True - ) - - @property - def unretweet(self): - """ :reference: https://dev.twitter.com/rest/reference/post/statuses/unretweet/%3Aid - :allowed_param:'id' - """ - return bind_api( - api=self, - path='/statuses/unretweet/{id}.json', - method='POST', - payload_type='status', - allowed_param=['id'], - require_auth=True - ) - - @property - def retweets(self): - """ :reference: https://dev.twitter.com/rest/reference/get/statuses/retweets/%3Aid - :allowed_param:'id', 'count' - """ - return bind_api( - api=self, - path='/statuses/retweets/{id}.json', - payload_type='status', payload_list=True, - allowed_param=['id', 'count'], - require_auth=True - ) - - @property - def retweeters(self): - """ :reference: https://dev.twitter.com/rest/reference/get/statuses/retweeters/ids - :allowed_param:'id', 'cursor', 'stringify_ids - """ - return bind_api( - api=self, - path='/statuses/retweeters/ids.json', - payload_type='ids', - allowed_param=['id', 'cursor', 'stringify_ids'] - ) - - @property - def get_user(self): - """ :reference: https://dev.twitter.com/rest/reference/get/users/show - :allowed_param:'id', 'user_id', 'screen_name' - """ - return bind_api( - api=self, - path='/users/show.json', - payload_type='user', - allowed_param=['id', 'user_id', 'screen_name'] - ) - - @property - def get_oembed(self): - """ :reference: 
https://dev.twitter.com/rest/reference/get/statuses/oembed - :allowed_param:'id', 'url', 'maxwidth', 'hide_media', 'omit_script', 'align', 'related', 'lang' - """ - return bind_api( - api=self, - path='/statuses/oembed.json', - payload_type='json', - allowed_param=['id', 'url', 'maxwidth', 'hide_media', 'omit_script', 'align', 'related', 'lang'] - ) - - def lookup_users(self, user_ids=None, screen_names=None, include_entities=None): - """ Perform bulk look up of users from user ID or screenname """ - post_data = {} - if include_entities is not None: - include_entities = 'true' if include_entities else 'false' - post_data['include_entities'] = include_entities - if user_ids: - post_data['user_id'] = list_to_csv(user_ids) - if screen_names: - post_data['screen_name'] = list_to_csv(screen_names) - - return self._lookup_users(post_data=post_data) - - @property - def _lookup_users(self): - """ :reference: https://dev.twitter.com/rest/reference/get/users/lookup - allowed_param='user_id', 'screen_name', 'include_entities' - """ - return bind_api( - api=self, - path='/users/lookup.json', - payload_type='user', payload_list=True, - method='POST', - allowed_param=['user_id', 'screen_name', 'include_entities'] - ) - - def me(self): - """ Get the authenticated user """ - return self.get_user(screen_name=self.auth.get_username()) - - @property - def search_users(self): - """ :reference: https://dev.twitter.com/rest/reference/get/users/search - :allowed_param:'q', 'count', 'page' - """ - return bind_api( - api=self, - path='/users/search.json', - payload_type='user', payload_list=True, - require_auth=True, - allowed_param=['q', 'count', 'page'] - ) - - @property - def suggested_users(self): - """ :reference: https://dev.twitter.com/rest/reference/get/users/suggestions/%3Aslug - :allowed_param:'slug', 'lang' - """ - return bind_api( - api=self, - path='/users/suggestions/{slug}.json', - payload_type='user', payload_list=True, - require_auth=True, - allowed_param=['slug', 'lang'] - ) - - @property - def suggested_categories(self): - """ :reference: https://dev.twitter.com/rest/reference/get/users/suggestions - :allowed_param:'lang' - """ - return bind_api( - api=self, - path='/users/suggestions.json', - payload_type='category', payload_list=True, - allowed_param=['lang'], - require_auth=True - ) - - @property - def suggested_users_tweets(self): - """ :reference: https://dev.twitter.com/rest/reference/get/users/suggestions/%3Aslug/members - :allowed_param:'slug' - """ - return bind_api( - api=self, - path='/users/suggestions/{slug}/members.json', - payload_type='status', payload_list=True, - allowed_param=['slug'], - require_auth=True - ) - - @property - def direct_messages(self): - """ :reference: https://dev.twitter.com/rest/reference/get/direct_messages - :allowed_param:'since_id', 'max_id', 'count', 'full_text' - """ - return bind_api( - api=self, - path='/direct_messages.json', - payload_type='direct_message', payload_list=True, - allowed_param=['since_id', 'max_id', 'count', 'full_text'], - require_auth=True - ) - - @property - def get_direct_message(self): - """ :reference: https://dev.twitter.com/rest/reference/get/direct_messages/show - :allowed_param:'id', 'full_text' - """ - return bind_api( - api=self, - path='/direct_messages/show/{id}.json', - payload_type='direct_message', - allowed_param=['id', 'full_text'], - require_auth=True - ) - - @property - def sent_direct_messages(self): - """ :reference: https://dev.twitter.com/rest/reference/get/direct_messages/sent - :allowed_param:'since_id', 
'max_id', 'count', 'page', 'full_text' - """ - return bind_api( - api=self, - path='/direct_messages/sent.json', - payload_type='direct_message', payload_list=True, - allowed_param=['since_id', 'max_id', 'count', 'page', 'full_text'], - require_auth=True - ) - - @property - def send_direct_message(self): - """ :reference: https://dev.twitter.com/rest/reference/post/direct_messages/new - :allowed_param:'user', 'screen_name', 'user_id', 'text' - """ - return bind_api( - api=self, - path='/direct_messages/new.json', - method='POST', - payload_type='direct_message', - allowed_param=['user', 'screen_name', 'user_id', 'text'], - require_auth=True - ) - - @property - def destroy_direct_message(self): - """ :reference: https://dev.twitter.com/rest/reference/post/direct_messages/destroy - :allowed_param:'id' - """ - return bind_api( - api=self, - path='/direct_messages/destroy.json', - method='POST', - payload_type='direct_message', - allowed_param=['id'], - require_auth=True - ) - - @property - def create_friendship(self): - """ :reference: https://dev.twitter.com/rest/reference/post/friendships/create - :allowed_param:'id', 'user_id', 'screen_name', 'follow' - """ - return bind_api( - api=self, - path='/friendships/create.json', - method='POST', - payload_type='user', - allowed_param=['id', 'user_id', 'screen_name', 'follow'], - require_auth=True - ) - - @property - def destroy_friendship(self): - """ :reference: https://dev.twitter.com/rest/reference/post/friendships/destroy - :allowed_param:'id', 'user_id', 'screen_name' - """ - return bind_api( - api=self, - path='/friendships/destroy.json', - method='POST', - payload_type='user', - allowed_param=['id', 'user_id', 'screen_name'], - require_auth=True - ) - - @property - def show_friendship(self): - """ :reference: https://dev.twitter.com/rest/reference/get/friendships/show - :allowed_param:'source_id', 'source_screen_name', 'target_id', 'target_screen_name' - """ - return bind_api( - api=self, - path='/friendships/show.json', - payload_type='friendship', - allowed_param=['source_id', 'source_screen_name', - 'target_id', 'target_screen_name'] - ) - - def lookup_friendships(self, user_ids=None, screen_names=None): - """ Perform bulk look up of friendships from user ID or screenname """ - return self._lookup_friendships(list_to_csv(user_ids), list_to_csv(screen_names)) - - @property - def _lookup_friendships(self): - """ :reference: https://dev.twitter.com/rest/reference/get/friendships/lookup - :allowed_param:'user_id', 'screen_name' - """ - return bind_api( - api=self, - path='/friendships/lookup.json', - payload_type='relationship', payload_list=True, - allowed_param=['user_id', 'screen_name'], - require_auth=True - ) - - @property - def friends_ids(self): - """ :reference: https://dev.twitter.com/rest/reference/get/friends/ids - :allowed_param:'id', 'user_id', 'screen_name', 'cursor' - """ - return bind_api( - api=self, - path='/friends/ids.json', - payload_type='ids', - allowed_param=['id', 'user_id', 'screen_name', 'cursor'] - ) - - @property - def friends(self): - """ :reference: https://dev.twitter.com/rest/reference/get/friends/list - :allowed_param:'id', 'user_id', 'screen_name', 'cursor', 'skip_status', 'include_user_entities' - """ - return bind_api( - api=self, - path='/friends/list.json', - payload_type='user', payload_list=True, - allowed_param=['id', 'user_id', 'screen_name', 'cursor', 'skip_status', 'include_user_entities'] - ) - - @property - def friendships_incoming(self): - """ :reference: 
https://dev.twitter.com/rest/reference/get/friendships/incoming - :allowed_param:'cursor' - """ - return bind_api( - api=self, - path='/friendships/incoming.json', - payload_type='ids', - allowed_param=['cursor'] - ) - - @property - def friendships_outgoing(self): - """ :reference: https://dev.twitter.com/rest/reference/get/friendships/outgoing - :allowed_param:'cursor' - """ - return bind_api( - api=self, - path='/friendships/outgoing.json', - payload_type='ids', - allowed_param=['cursor'] - ) - - @property - def followers_ids(self): - """ :reference: https://dev.twitter.com/rest/reference/get/followers/ids - :allowed_param:'id', 'user_id', 'screen_name', 'cursor', 'count' - """ - return bind_api( - api=self, - path='/followers/ids.json', - payload_type='ids', - allowed_param=['id', 'user_id', 'screen_name', 'cursor', 'count'] - ) - - @property - def followers(self): - """ :reference: https://dev.twitter.com/rest/reference/get/followers/list - :allowed_param:'id', 'user_id', 'screen_name', 'cursor', 'count', 'skip_status', 'include_user_entities' - """ - return bind_api( - api=self, - path='/followers/list.json', - payload_type='user', payload_list=True, - allowed_param=['id', 'user_id', 'screen_name', 'cursor', 'count', - 'skip_status', 'include_user_entities'] - ) - - @property - def get_settings(self): - """ :reference: https://dev.twitter.com/rest/reference/get/account/settings - """ - return bind_api( - api=self, - path='/account/settings.json', - payload_type='json', - use_cache=False - ) - - @property - def set_settings(self): - """ :reference: https://dev.twitter.com/rest/reference/post/account/settings - :allowed_param:'sleep_time_enabled', 'start_sleep_time', - 'end_sleep_time', 'time_zone', 'trend_location_woeid', - 'allow_contributor_request', 'lang' - """ - return bind_api( - api=self, - path='/account/settings.json', - method='POST', - payload_type='json', - allowed_param=['sleep_time_enabled', 'start_sleep_time', - 'end_sleep_time', 'time_zone', - 'trend_location_woeid', 'allow_contributor_request', - 'lang'], - use_cache=False - ) - - def verify_credentials(self, **kargs): - """ :reference: https://dev.twitter.com/rest/reference/get/account/verify_credentials - :allowed_param:'include_entities', 'skip_status', 'include_email' - """ - try: - return bind_api( - api=self, - path='/account/verify_credentials.json', - payload_type='user', - require_auth=True, - allowed_param=['include_entities', 'skip_status', 'include_email'], - )(**kargs) - except TweepError as e: - if e.response and e.response.status == 401: - return False - raise - - @property - def rate_limit_status(self): - """ :reference: https://dev.twitter.com/rest/reference/get/application/rate_limit_status - :allowed_param:'resources' - """ - return bind_api( - api=self, - path='/application/rate_limit_status.json', - payload_type='json', - allowed_param=['resources'], - use_cache=False - ) - - @property - def set_delivery_device(self): - """ :reference: https://dev.twitter.com/rest/reference/post/account/update_delivery_device - :allowed_param:'device' - """ - return bind_api( - api=self, - path='/account/update_delivery_device.json', - method='POST', - allowed_param=['device'], - payload_type='user', - require_auth=True - ) - - def update_profile_image(self, filename, file_=None): - """ :reference: https://dev.twitter.com/rest/reference/post/account/update_profile_image - :allowed_param:'include_entities', 'skip_status' - """ - headers, post_data = API._pack_image(filename, 700, f=file_) - return bind_api( - 
api=self, - path='/account/update_profile_image.json', - method='POST', - payload_type='user', - allowed_param=['include_entities', 'skip_status'], - require_auth=True - )(self, post_data=post_data, headers=headers) - - def update_profile_background_image(self, filename, **kargs): - """ :reference: https://dev.twitter.com/rest/reference/post/account/update_profile_background_image - :allowed_param:'tile', 'include_entities', 'skip_status', 'use' - """ - f = kargs.pop('file', None) - headers, post_data = API._pack_image(filename, 800, f=f) - bind_api( - api=self, - path='/account/update_profile_background_image.json', - method='POST', - payload_type='user', - allowed_param=['tile', 'include_entities', 'skip_status', 'use'], - require_auth=True - )(post_data=post_data, headers=headers) - - def update_profile_banner(self, filename, **kargs): - """ :reference: https://dev.twitter.com/rest/reference/post/account/update_profile_banner - :allowed_param:'width', 'height', 'offset_left', 'offset_right' - """ - f = kargs.pop('file', None) - headers, post_data = API._pack_image(filename, 700, form_field="banner", f=f) - bind_api( - api=self, - path='/account/update_profile_banner.json', - method='POST', - allowed_param=['width', 'height', 'offset_left', 'offset_right'], - require_auth=True - )(post_data=post_data, headers=headers) - - @property - def update_profile(self): - """ :reference: https://dev.twitter.com/rest/reference/post/account/update_profile - :allowed_param:'name', 'url', 'location', 'description', 'profile_link_color' - """ - return bind_api( - api=self, - path='/account/update_profile.json', - method='POST', - payload_type='user', - allowed_param=['name', 'url', 'location', 'description', 'profile_link_color'], - require_auth=True - ) - - @property - def favorites(self): - """ :reference: https://dev.twitter.com/rest/reference/get/favorites/list - :allowed_param:'screen_name', 'user_id', 'max_id', 'count', 'since_id', 'max_id' - """ - return bind_api( - api=self, - path='/favorites/list.json', - payload_type='status', payload_list=True, - allowed_param=['screen_name', 'user_id', 'max_id', 'count', 'since_id', 'max_id'] - ) - - @property - def create_favorite(self): - """ :reference:https://dev.twitter.com/rest/reference/post/favorites/create - :allowed_param:'id' - """ - return bind_api( - api=self, - path='/favorites/create.json', - method='POST', - payload_type='status', - allowed_param=['id'], - require_auth=True - ) - - @property - def destroy_favorite(self): - """ :reference: https://dev.twitter.com/rest/reference/post/favorites/destroy - :allowed_param:'id' - """ - return bind_api( - api=self, - path='/favorites/destroy.json', - method='POST', - payload_type='status', - allowed_param=['id'], - require_auth=True - ) - - @property - def create_block(self): - """ :reference: https://dev.twitter.com/rest/reference/post/blocks/create - :allowed_param:'id', 'user_id', 'screen_name' - """ - return bind_api( - api=self, - path='/blocks/create.json', - method='POST', - payload_type='user', - allowed_param=['id', 'user_id', 'screen_name'], - require_auth=True - ) - - @property - def destroy_block(self): - """ :reference: https://dev.twitter.com/rest/reference/post/blocks/destroy - :allowed_param:'id', 'user_id', 'screen_name' - """ - return bind_api( - api=self, - path='/blocks/destroy.json', - method='POST', - payload_type='user', - allowed_param=['id', 'user_id', 'screen_name'], - require_auth=True - ) - - @property - def mutes_ids(self): - """ :reference: 
https://dev.twitter.com/rest/reference/get/mutes/users/ids """ - return bind_api( - api=self, - path='/mutes/users/ids.json', - payload_type='json', - require_auth=True - ) - - @property - def create_mute(self): - """ :reference: https://dev.twitter.com/rest/reference/post/mutes/users/create - :allowed_param:'id', 'user_id', 'screen_name' - """ - return bind_api( - api=self, - path='/mutes/users/create.json', - method='POST', - payload_type='user', - allowed_param=['id', 'user_id', 'screen_name'], - require_auth=True - ) - - @property - def destroy_mute(self): - """ :reference: https://dev.twitter.com/rest/reference/post/mutes/users/destroy - :allowed_param:'id', 'user_id', 'screen_name' - """ - return bind_api( - api=self, - path='/mutes/users/destroy.json', - method='POST', - payload_type='user', - allowed_param=['id', 'user_id', 'screen_name'], - require_auth=True - ) - - - - @property - def blocks(self): - """ :reference: https://dev.twitter.com/rest/reference/get/blocks/list - :allowed_param:'cursor' - """ - return bind_api( - api=self, - path='/blocks/list.json', - payload_type='user', payload_list=True, - allowed_param=['cursor'], - require_auth=True - ) - - @property - def blocks_ids(self): - """ :reference: https://dev.twitter.com/rest/reference/get/blocks/ids """ - return bind_api( - api=self, - path='/blocks/ids.json', - payload_type='json', - require_auth=True - ) - - @property - def report_spam(self): - """ :reference: https://dev.twitter.com/rest/reference/post/users/report_spam - :allowed_param:'user_id', 'screen_name' - """ - return bind_api( - api=self, - path='/users/report_spam.json', - method='POST', - payload_type='user', - allowed_param=['user_id', 'screen_name'], - require_auth=True - ) - - @property - def saved_searches(self): - """ :reference: https://dev.twitter.com/rest/reference/get/saved_searches/show/%3Aid """ - return bind_api( - api=self, - path='/saved_searches/list.json', - payload_type='saved_search', payload_list=True, - require_auth=True - ) - - @property - def get_saved_search(self): - """ :reference: https://dev.twitter.com/rest/reference/get/saved_searches/show/%3Aid - :allowed_param:'id' - """ - return bind_api( - api=self, - path='/saved_searches/show/{id}.json', - payload_type='saved_search', - allowed_param=['id'], - require_auth=True - ) - - @property - def create_saved_search(self): - """ :reference: https://dev.twitter.com/rest/reference/post/saved_searches/create - :allowed_param:'query' - """ - return bind_api( - api=self, - path='/saved_searches/create.json', - method='POST', - payload_type='saved_search', - allowed_param=['query'], - require_auth=True - ) - - @property - def destroy_saved_search(self): - """ :reference: https://dev.twitter.com/rest/reference/post/saved_searches/destroy/%3Aid - :allowed_param:'id' - """ - return bind_api( - api=self, - path='/saved_searches/destroy/{id}.json', - method='POST', - payload_type='saved_search', - allowed_param=['id'], - require_auth=True - ) - - @property - def create_list(self): - """ :reference: https://dev.twitter.com/rest/reference/post/lists/create - :allowed_param:'name', 'mode', 'description' - """ - return bind_api( - api=self, - path='/lists/create.json', - method='POST', - payload_type='list', - allowed_param=['name', 'mode', 'description'], - require_auth=True - ) - - @property - def destroy_list(self): - """ :reference: https://dev.twitter.com/rest/reference/post/lists/destroy - :allowed_param:'owner_screen_name', 'owner_id', 'list_id', 'slug' - """ - return bind_api( - api=self, - 
path='/lists/destroy.json', - method='POST', - payload_type='list', - allowed_param=['owner_screen_name', 'owner_id', 'list_id', 'slug'], - require_auth=True - ) - - @property - def update_list(self): - """ :reference: https://dev.twitter.com/rest/reference/post/lists/update - :allowed_param: list_id', 'slug', 'name', 'mode', 'description', 'owner_screen_name', 'owner_id' - """ - return bind_api( - api=self, - path='/lists/update.json', - method='POST', - payload_type='list', - allowed_param=['list_id', 'slug', 'name', 'mode', 'description', 'owner_screen_name', 'owner_id'], - require_auth=True - ) - - @property - def lists_all(self): - """ :reference: https://dev.twitter.com/rest/reference/get/lists/list - :allowed_param:'screen_name', 'user_id' - """ - return bind_api( - api=self, - path='/lists/list.json', - payload_type='list', payload_list=True, - allowed_param=['screen_name', 'user_id'], - require_auth=True - ) - - @property - def lists_memberships(self): - """ :reference: https://dev.twitter.com/rest/reference/get/lists/memberships - :allowed_param:'screen_name', 'user_id', 'filter_to_owned_lists', 'cursor' - """ - return bind_api( - api=self, - path='/lists/memberships.json', - payload_type='list', payload_list=True, - allowed_param=['screen_name', 'user_id', 'filter_to_owned_lists', 'cursor'], - require_auth=True - ) - - @property - def lists_subscriptions(self): - """ :reference: https://dev.twitter.com/rest/reference/get/lists/subscriptions - :allowed_param:'screen_name', 'user_id', 'cursor' - """ - return bind_api( - api=self, - path='/lists/subscriptions.json', - payload_type='list', payload_list=True, - allowed_param=['screen_name', 'user_id', 'cursor'], - require_auth=True - ) - - @property - def list_timeline(self): - """ :reference: https://dev.twitter.com/docs/api/1.1/get/lists/statuses - :allowed_param:'owner_screen_name', 'slug', 'owner_id', 'list_id', - 'since_id', 'max_id', 'count', 'include_rts - """ - return bind_api( - api=self, - path='/lists/statuses.json', - payload_type='status', payload_list=True, - allowed_param=['owner_screen_name', 'slug', 'owner_id', - 'list_id', 'since_id', 'max_id', 'count', - 'include_rts'] - ) - - @property - def get_list(self): - """ :reference: https://dev.twitter.com/rest/reference/get/lists/show - :allowed_param:'owner_screen_name', 'owner_id', 'slug', 'list_id' - """ - return bind_api( - api=self, - path='/lists/show.json', - payload_type='list', - allowed_param=['owner_screen_name', 'owner_id', 'slug', 'list_id'] - ) - - @property - def add_list_member(self): - """ :reference: https://dev.twitter.com/docs/api/1.1/post/lists/members/create - :allowed_param:'screen_name', 'user_id', 'owner_screen_name', - 'owner_id', 'slug', 'list_id' - """ - return bind_api( - api=self, - path='/lists/members/create.json', - method='POST', - payload_type='list', - allowed_param=['screen_name', 'user_id', 'owner_screen_name', - 'owner_id', 'slug', 'list_id'], - require_auth=True - ) - - @property - def remove_list_member(self): - """ :reference: https://dev.twitter.com/docs/api/1.1/post/lists/members/destroy - :allowed_param:'screen_name', 'user_id', 'owner_screen_name', - 'owner_id', 'slug', 'list_id' - """ - return bind_api( - api=self, - path='/lists/members/destroy.json', - method='POST', - payload_type='list', - allowed_param=['screen_name', 'user_id', 'owner_screen_name', - 'owner_id', 'slug', 'list_id'], - require_auth=True - ) - - def add_list_members(self, screen_name=None, user_id=None, slug=None, - list_id=None, owner_id=None, 
owner_screen_name=None): - """ Perform bulk add of list members from user ID or screenname """ - return self._add_list_members(list_to_csv(screen_name), - list_to_csv(user_id), - slug, list_id, owner_id, - owner_screen_name) - - @property - def _add_list_members(self): - """ :reference: https://dev.twitter.com/docs/api/1.1/post/lists/members/create_all - :allowed_param:'screen_name', 'user_id', 'slug', 'list_id', - 'owner_id', 'owner_screen_name' - - """ - return bind_api( - api=self, - path='/lists/members/create_all.json', - method='POST', - payload_type='list', - allowed_param=['screen_name', 'user_id', 'slug', 'list_id', - 'owner_id', 'owner_screen_name'], - require_auth=True - ) - - def remove_list_members(self, screen_name=None, user_id=None, slug=None, - list_id=None, owner_id=None, owner_screen_name=None): - """ Perform bulk remove of list members from user ID or screenname """ - return self._remove_list_members(list_to_csv(screen_name), - list_to_csv(user_id), - slug, list_id, owner_id, - owner_screen_name) - - @property - def _remove_list_members(self): - """ :reference: https://dev.twitter.com/docs/api/1.1/post/lists/members/destroy_all - :allowed_param:'screen_name', 'user_id', 'slug', 'list_id', - 'owner_id', 'owner_screen_name' - - """ - return bind_api( - api=self, - path='/lists/members/destroy_all.json', - method='POST', - payload_type='list', - allowed_param=['screen_name', 'user_id', 'slug', 'list_id', - 'owner_id', 'owner_screen_name'], - require_auth=True - ) - - @property - def list_members(self): - """ :reference: https://dev.twitter.com/docs/api/1.1/get/lists/members - :allowed_param:'owner_screen_name', 'slug', 'list_id', - 'owner_id', 'cursor - """ - return bind_api( - api=self, - path='/lists/members.json', - payload_type='user', payload_list=True, - allowed_param=['owner_screen_name', 'slug', 'list_id', - 'owner_id', 'cursor'] - ) - - @property - def show_list_member(self): - """ :reference: https://dev.twitter.com/docs/api/1.1/get/lists/members/show - :allowed_param:'list_id', 'slug', 'user_id', 'screen_name', - 'owner_screen_name', 'owner_id - """ - return bind_api( - api=self, - path='/lists/members/show.json', - payload_type='user', - allowed_param=['list_id', 'slug', 'user_id', 'screen_name', - 'owner_screen_name', 'owner_id'] - ) - - @property - def subscribe_list(self): - """ :reference: https://dev.twitter.com/docs/api/1.1/post/lists/subscribers/create - :allowed_param:'owner_screen_name', 'slug', 'owner_id', - 'list_id' - """ - return bind_api( - api=self, - path='/lists/subscribers/create.json', - method='POST', - payload_type='list', - allowed_param=['owner_screen_name', 'slug', 'owner_id', - 'list_id'], - require_auth=True - ) - - @property - def unsubscribe_list(self): - """ :reference: https://dev.twitter.com/docs/api/1.1/post/lists/subscribers/destroy - :allowed_param:'owner_screen_name', 'slug', 'owner_id', - 'list_id' - """ - return bind_api( - api=self, - path='/lists/subscribers/destroy.json', - method='POST', - payload_type='list', - allowed_param=['owner_screen_name', 'slug', 'owner_id', - 'list_id'], - require_auth=True - ) - - @property - def list_subscribers(self): - """ :reference: https://dev.twitter.com/docs/api/1.1/get/lists/subscribers - :allowed_param:'owner_screen_name', 'slug', 'owner_id', - 'list_id', 'cursor - """ - return bind_api( - api=self, - path='/lists/subscribers.json', - payload_type='user', payload_list=True, - allowed_param=['owner_screen_name', 'slug', 'owner_id', - 'list_id', 'cursor'] - ) - - @property - def 
show_list_subscriber(self): - """ :reference: https://dev.twitter.com/docs/api/1.1/get/lists/subscribers/show - :allowed_param:'owner_screen_name', 'slug', 'screen_name', - 'owner_id', 'list_id', 'user_id - """ - return bind_api( - api=self, - path='/lists/subscribers/show.json', - payload_type='user', - allowed_param=['owner_screen_name', 'slug', 'screen_name', - 'owner_id', 'list_id', 'user_id'] - ) - - @property - def trends_available(self): - """ :reference: https://dev.twitter.com/rest/reference/get/trends/available """ - return bind_api( - api=self, - path='/trends/available.json', - payload_type='json' - ) - - @property - def trends_place(self): - """ :reference: https://dev.twitter.com/rest/reference/get/trends/place - :allowed_param:'id', 'exclude' - """ - return bind_api( - api=self, - path='/trends/place.json', - payload_type='json', - allowed_param=['id', 'exclude'] - ) - - @property - def trends_closest(self): - """ :reference: https://dev.twitter.com/rest/reference/get/trends/closest - :allowed_param:'lat', 'long' - """ - return bind_api( - api=self, - path='/trends/closest.json', - payload_type='json', - allowed_param=['lat', 'long'] - ) - - @property - def search(self): - """ :reference: https://dev.twitter.com/rest/reference/get/search/tweets - :allowed_param:'q', 'lang', 'locale', 'since_id', 'geocode', - 'max_id', 'since', 'until', 'result_type', 'count', - 'include_entities', 'from', 'to', 'source' - """ - return bind_api( - api=self, - path='/search/tweets.json', - payload_type='search_results', - allowed_param=['q', 'lang', 'locale', 'since_id', 'geocode', - 'max_id', 'since', 'until', 'result_type', - 'count', 'include_entities', 'from', - 'to', 'source'] - ) - - @property - def reverse_geocode(self): - """ :reference: https://dev.twitter.com/rest/reference/get/geo/reverse_geocode - :allowed_param:'lat', 'long', 'accuracy', 'granularity', 'max_results' - """ - return bind_api( - api=self, - path='/geo/reverse_geocode.json', - payload_type='place', payload_list=True, - allowed_param=['lat', 'long', 'accuracy', 'granularity', - 'max_results'] - ) - - @property - def geo_id(self): - """ :reference: https://dev.twitter.com/rest/reference/get/geo/id/%3Aplace_id - :allowed_param:'id' - """ - return bind_api( - api=self, - path='/geo/id/{id}.json', - payload_type='place', - allowed_param=['id'] - ) - - @property - def geo_search(self): - """ :reference: https://dev.twitter.com/docs/api/1.1/get/geo/search - :allowed_param:'lat', 'long', 'query', 'ip', 'granularity', - 'accuracy', 'max_results', 'contained_within - - """ - return bind_api( - api=self, - path='/geo/search.json', - payload_type='place', payload_list=True, - allowed_param=['lat', 'long', 'query', 'ip', 'granularity', - 'accuracy', 'max_results', 'contained_within'] - ) - - @property - def geo_similar_places(self): - """ :reference: https://dev.twitter.com/rest/reference/get/geo/similar_places - :allowed_param:'lat', 'long', 'name', 'contained_within' - """ - return bind_api( - api=self, - path='/geo/similar_places.json', - payload_type='place', payload_list=True, - allowed_param=['lat', 'long', 'name', 'contained_within'] - ) - - @property - def supported_languages(self): - """ :reference: https://dev.twitter.com/rest/reference/get/help/languages """ - return bind_api( - api=self, - path='/help/languages.json', - payload_type='json', - require_auth=True - ) - - @property - def configuration(self): - """ :reference: https://dev.twitter.com/rest/reference/get/help/configuration """ - return bind_api( - api=self, - 
path='/help/configuration.json', - payload_type='json', - require_auth=True - ) - - """ Internal use only """ - - @staticmethod - def _pack_image(filename, max_size, form_field="image", f=None): - """Pack image from file into multipart-formdata post body""" - # image must be less than 700kb in size - if f is None: - try: - if os.path.getsize(filename) > (max_size * 1024): - raise TweepError('File is too big, must be less than %skb.' % max_size) - except os.error as e: - raise TweepError('Unable to access file: %s' % e.strerror) - - # build the mulitpart-formdata body - fp = open(filename, 'rb') - else: - f.seek(0, 2) # Seek to end of file - if f.tell() > (max_size * 1024): - raise TweepError('File is too big, must be less than %skb.' % max_size) - f.seek(0) # Reset to beginning of file - fp = f - - # image must be gif, jpeg, or png - file_type = mimetypes.guess_type(filename) - if file_type is None: - raise TweepError('Could not determine file type') - file_type = file_type[0] - if file_type not in ['image/gif', 'image/jpeg', 'image/png']: - raise TweepError('Invalid file type for image: %s' % file_type) - - if isinstance(filename, six.text_type): - filename = filename.encode("utf-8") - - BOUNDARY = b'Tw3ePy' - body = [] - body.append(b'--' + BOUNDARY) - body.append('Content-Disposition: form-data; name="{0}";' - ' filename="{1}"'.format(form_field, filename) - .encode('utf-8')) - body.append('Content-Type: {0}'.format(file_type).encode('utf-8')) - body.append(b'') - body.append(fp.read()) - body.append(b'--' + BOUNDARY + b'--') - body.append(b'') - fp.close() - body = b'\r\n'.join(body) - - # build headers - headers = { - 'Content-Type': 'multipart/form-data; boundary=Tw3ePy', - 'Content-Length': str(len(body)) - } - - return headers, body diff --git a/libs/apprise/plugins/NotifyTwitter/tweepy/auth.py b/libs/apprise/plugins/NotifyTwitter/tweepy/auth.py deleted file mode 100644 index b450e3105..000000000 --- a/libs/apprise/plugins/NotifyTwitter/tweepy/auth.py +++ /dev/null @@ -1,178 +0,0 @@ -from __future__ import print_function - -import six -import logging - -from .error import TweepError -from .api import API -import requests -from requests_oauthlib import OAuth1Session, OAuth1 -from requests.auth import AuthBase -from six.moves.urllib.parse import parse_qs - -WARNING_MESSAGE = """Warning! Due to a Twitter API bug, signin_with_twitter -and access_type don't always play nice together. 
Details -https://dev.twitter.com/discussions/21281""" - - -class AuthHandler(object): - - def apply_auth(self, url, method, headers, parameters): - """Apply authentication headers to request""" - raise NotImplementedError - - def get_username(self): - """Return the username of the authenticated user""" - raise NotImplementedError - - -class OAuthHandler(AuthHandler): - """OAuth authentication handler""" - OAUTH_HOST = 'api.twitter.com' - OAUTH_ROOT = '/oauth/' - - def __init__(self, consumer_key, consumer_secret, callback=None): - if type(consumer_key) == six.text_type: - consumer_key = consumer_key.encode('ascii') - - if type(consumer_secret) == six.text_type: - consumer_secret = consumer_secret.encode('ascii') - - self.consumer_key = consumer_key - self.consumer_secret = consumer_secret - self.access_token = None - self.access_token_secret = None - self.callback = callback - self.username = None - self.oauth = OAuth1Session(consumer_key, - client_secret=consumer_secret, - callback_uri=self.callback) - - def _get_oauth_url(self, endpoint): - return 'https://' + self.OAUTH_HOST + self.OAUTH_ROOT + endpoint - - def apply_auth(self): - return OAuth1(self.consumer_key, - client_secret=self.consumer_secret, - resource_owner_key=self.access_token, - resource_owner_secret=self.access_token_secret, - decoding=None) - - def _get_request_token(self, access_type=None): - try: - url = self._get_oauth_url('request_token') - if access_type: - url += '?x_auth_access_type=%s' % access_type - return self.oauth.fetch_request_token(url) - except Exception as e: - raise TweepError(e) - - def set_access_token(self, key, secret): - self.access_token = key - self.access_token_secret = secret - - def get_authorization_url(self, - signin_with_twitter=False, - access_type=None): - """Get the authorization URL to redirect the user""" - try: - if signin_with_twitter: - url = self._get_oauth_url('authenticate') - if access_type: - logging.warning(WARNING_MESSAGE) - else: - url = self._get_oauth_url('authorize') - self.request_token = self._get_request_token(access_type=access_type) - return self.oauth.authorization_url(url) - except Exception as e: - raise TweepError(e) - - def get_access_token(self, verifier=None): - """ - After user has authorized the request token, get access token - with user supplied verifier. - """ - try: - url = self._get_oauth_url('access_token') - self.oauth = OAuth1Session(self.consumer_key, - client_secret=self.consumer_secret, - resource_owner_key=self.request_token['oauth_token'], - resource_owner_secret=self.request_token['oauth_token_secret'], - verifier=verifier, callback_uri=self.callback) - resp = self.oauth.fetch_access_token(url) - self.access_token = resp['oauth_token'] - self.access_token_secret = resp['oauth_token_secret'] - return self.access_token, self.access_token_secret - except Exception as e: - raise TweepError(e) - - def get_xauth_access_token(self, username, password): - """ - Get an access token from an username and password combination. - In order to get this working you need to create an app at - http://twitter.com/apps, after that send a mail to [email protected] - and request activation of xAuth for it. 
- """ - try: - url = self._get_oauth_url('access_token') - oauth = OAuth1(self.consumer_key, - client_secret=self.consumer_secret) - r = requests.post(url=url, - auth=oauth, - headers={'x_auth_mode': 'client_auth', - 'x_auth_username': username, - 'x_auth_password': password}) - - credentials = parse_qs(r.content) - return credentials.get('oauth_token')[0], credentials.get('oauth_token_secret')[0] - except Exception as e: - raise TweepError(e) - - def get_username(self): - if self.username is None: - api = API(self) - user = api.verify_credentials() - if user: - self.username = user.screen_name - else: - raise TweepError('Unable to get username,' - ' invalid oauth token!') - return self.username - - -class OAuth2Bearer(AuthBase): - def __init__(self, bearer_token): - self.bearer_token = bearer_token - - def __call__(self, request): - request.headers['Authorization'] = 'Bearer ' + self.bearer_token - return request - - -class AppAuthHandler(AuthHandler): - """Application-only authentication handler""" - - OAUTH_HOST = 'api.twitter.com' - OAUTH_ROOT = '/oauth2/' - - def __init__(self, consumer_key, consumer_secret): - self.consumer_key = consumer_key - self.consumer_secret = consumer_secret - self._bearer_token = '' - - resp = requests.post(self._get_oauth_url('token'), - auth=(self.consumer_key, - self.consumer_secret), - data={'grant_type': 'client_credentials'}) - data = resp.json() - if data.get('token_type') != 'bearer': - raise TweepError('Expected token_type to equal "bearer", ' - 'but got %s instead' % data.get('token_type')) - - self._bearer_token = data['access_token'] - - def _get_oauth_url(self, endpoint): - return 'https://' + self.OAUTH_HOST + self.OAUTH_ROOT + endpoint - - def apply_auth(self): - return OAuth2Bearer(self._bearer_token) diff --git a/libs/apprise/plugins/NotifyTwitter/tweepy/binder.py b/libs/apprise/plugins/NotifyTwitter/tweepy/binder.py deleted file mode 100644 index ff807313b..000000000 --- a/libs/apprise/plugins/NotifyTwitter/tweepy/binder.py +++ /dev/null @@ -1,261 +0,0 @@ -# Tweepy -# Copyright 2009-2010 Joshua Roesslein -# See LICENSE for details. - -from __future__ import print_function - -import time -import re - -from six.moves.urllib.parse import quote, urlencode -import requests - -import logging - -from .error import TweepError, RateLimitError, is_rate_limit_error_message -from .utils import convert_to_utf8_str -from .models import Model -import six -import sys - - -re_path_template = re.compile(r'{\w+}') - -log = logging.getLogger('tweepy.binder') - -def bind_api(**config): - - class APIMethod(object): - - api = config['api'] - path = config['path'] - payload_type = config.get('payload_type', None) - payload_list = config.get('payload_list', False) - allowed_param = config.get('allowed_param', []) - method = config.get('method', 'GET') - require_auth = config.get('require_auth', False) - search_api = config.get('search_api', False) - upload_api = config.get('upload_api', False) - use_cache = config.get('use_cache', True) - session = requests.Session() - - def __init__(self, args, kwargs): - api = self.api - # If authentication is required and no credentials - # are provided, throw an error. 
- if self.require_auth and not api.auth: - raise TweepError('Authentication required!') - - self.post_data = kwargs.pop('post_data', None) - self.retry_count = kwargs.pop('retry_count', - api.retry_count) - self.retry_delay = kwargs.pop('retry_delay', - api.retry_delay) - self.retry_errors = kwargs.pop('retry_errors', - api.retry_errors) - self.wait_on_rate_limit = kwargs.pop('wait_on_rate_limit', - api.wait_on_rate_limit) - self.wait_on_rate_limit_notify = kwargs.pop('wait_on_rate_limit_notify', - api.wait_on_rate_limit_notify) - self.parser = kwargs.pop('parser', api.parser) - self.session.headers = kwargs.pop('headers', {}) - self.build_parameters(args, kwargs) - - # Pick correct URL root to use - if self.search_api: - self.api_root = api.search_root - elif self.upload_api: - self.api_root = api.upload_root - else: - self.api_root = api.api_root - - # Perform any path variable substitution - self.build_path() - - if self.search_api: - self.host = api.search_host - elif self.upload_api: - self.host = api.upload_host - else: - self.host = api.host - - # Manually set Host header to fix an issue in python 2.5 - # or older where Host is set including the 443 port. - # This causes Twitter to issue 301 redirect. - # See Issue https://github.com/tweepy/tweepy/issues/12 - self.session.headers['Host'] = self.host - # Monitoring rate limits - self._remaining_calls = None - self._reset_time = None - - def build_parameters(self, args, kwargs): - self.session.params = {} - for idx, arg in enumerate(args): - if arg is None: - continue - try: - self.session.params[self.allowed_param[idx]] = convert_to_utf8_str(arg) - except IndexError: - raise TweepError('Too many parameters supplied!') - - for k, arg in kwargs.items(): - if arg is None: - continue - if k in self.session.params: - raise TweepError('Multiple values for parameter %s supplied!' % k) - - self.session.params[k] = convert_to_utf8_str(arg) - - log.debug("PARAMS: %r", self.session.params) - - def build_path(self): - for variable in re_path_template.findall(self.path): - name = variable.strip('{}') - - if name == 'user' and 'user' not in self.session.params and self.api.auth: - # No 'user' parameter provided, fetch it from Auth instead. - value = self.api.auth.get_username() - else: - try: - value = quote(self.session.params[name]) - except KeyError: - raise TweepError('No parameter value found for path variable: %s' % name) - del self.session.params[name] - - self.path = self.path.replace(variable, value) - - def execute(self): - self.api.cached_result = False - - # Build the request URL - url = self.api_root + self.path - full_url = 'https://' + self.host + url - - # Query the cache if one is available - # and this request uses a GET method. - if self.use_cache and self.api.cache and self.method == 'GET': - cache_result = self.api.cache.get('%s?%s' % (url, urlencode(self.session.params))) - # if cache result found and not expired, return it - if cache_result: - # must restore api reference - if isinstance(cache_result, list): - for result in cache_result: - if isinstance(result, Model): - result._api = self.api - else: - if isinstance(cache_result, Model): - cache_result._api = self.api - self.api.cached_result = True - return cache_result - - # Continue attempting request until successful - # or maximum number of retries is reached. 
- retries_performed = 0 - while retries_performed < self.retry_count + 1: - # handle running out of api calls - if self.wait_on_rate_limit: - if self._reset_time is not None: - if self._remaining_calls is not None: - if self._remaining_calls < 1: - sleep_time = self._reset_time - int(time.time()) - if sleep_time > 0: - if self.wait_on_rate_limit_notify: - log.warning("Rate limit reached. Sleeping for: %d" % sleep_time) - time.sleep(sleep_time + 5) # sleep for few extra sec - - # if self.wait_on_rate_limit and self._reset_time is not None and \ - # self._remaining_calls is not None and self._remaining_calls < 1: - # sleep_time = self._reset_time - int(time.time()) - # if sleep_time > 0: - # if self.wait_on_rate_limit_notify: - # log.warning("Rate limit reached. Sleeping for: %d" % sleep_time) - # time.sleep(sleep_time + 5) # sleep for few extra sec - - # Apply authentication - auth = None - if self.api.auth: - auth = self.api.auth.apply_auth() - - # Request compression if configured - if self.api.compression: - self.session.headers['Accept-encoding'] = 'gzip' - - # Execute request - try: - resp = self.session.request(self.method, - full_url, - data=self.post_data, - timeout=self.api.timeout, - auth=auth, - proxies=self.api.proxy) - except Exception as e: - six.reraise(TweepError, TweepError('Failed to send request: %s' % e), sys.exc_info()[2]) - - rem_calls = resp.headers.get('x-rate-limit-remaining') - - if rem_calls is not None: - self._remaining_calls = int(rem_calls) - elif isinstance(self._remaining_calls, int): - self._remaining_calls -= 1 - reset_time = resp.headers.get('x-rate-limit-reset') - if reset_time is not None: - self._reset_time = int(reset_time) - if self.wait_on_rate_limit and self._remaining_calls == 0 and ( - # if ran out of calls before waiting switching retry last call - resp.status_code == 429 or resp.status_code == 420): - continue - retry_delay = self.retry_delay - # Exit request loop if non-retry error code - if resp.status_code == 200: - break - elif (resp.status_code == 429 or resp.status_code == 420) and self.wait_on_rate_limit: - if 'retry-after' in resp.headers: - retry_delay = float(resp.headers['retry-after']) - elif self.retry_errors and resp.status_code not in self.retry_errors: - break - - # Sleep before retrying request again - time.sleep(retry_delay) - retries_performed += 1 - - # If an error was returned, throw an exception - self.api.last_response = resp - if resp.status_code and not 200 <= resp.status_code < 300: - try: - error_msg, api_error_code = \ - self.parser.parse_error(resp.text) - except Exception: - error_msg = "Twitter error response: status code = %s" % resp.status_code - api_error_code = None - - if is_rate_limit_error_message(error_msg): - raise RateLimitError(error_msg, resp) - else: - raise TweepError(error_msg, resp, api_code=api_error_code) - - # Parse the response payload - result = self.parser.parse(self, resp.text) - - # Store result into cache if one is available. 
- if self.use_cache and self.api.cache and self.method == 'GET' and result: - self.api.cache.store('%s?%s' % (url, urlencode(self.session.params)), result) - - return result - - def _call(*args, **kwargs): - method = APIMethod(args, kwargs) - if kwargs.get('create'): - return method - else: - return method.execute() - - # Set pagination mode - if 'cursor' in APIMethod.allowed_param: - _call.pagination_mode = 'cursor' - elif 'max_id' in APIMethod.allowed_param: - if 'since_id' in APIMethod.allowed_param: - _call.pagination_mode = 'id' - elif 'page' in APIMethod.allowed_param: - _call.pagination_mode = 'page' - - return _call diff --git a/libs/apprise/plugins/NotifyTwitter/tweepy/cache.py b/libs/apprise/plugins/NotifyTwitter/tweepy/cache.py deleted file mode 100644 index 8c2878166..000000000 --- a/libs/apprise/plugins/NotifyTwitter/tweepy/cache.py +++ /dev/null @@ -1,432 +0,0 @@ -# Tweepy -# Copyright 2009-2010 Joshua Roesslein -# See LICENSE for details. - -from __future__ import print_function - -import time -import datetime -import hashlib -import threading -import os -import logging - -try: - import cPickle as pickle -except ImportError: - import pickle - -try: - import fcntl -except ImportError: - # Probably on a windows system - # TODO: use win32file - pass - -log = logging.getLogger('tweepy.cache') - -class Cache(object): - """Cache interface""" - - def __init__(self, timeout=60): - """Initialize the cache - timeout: number of seconds to keep a cached entry - """ - self.timeout = timeout - - def store(self, key, value): - """Add new record to cache - key: entry key - value: data of entry - """ - raise NotImplementedError - - def get(self, key, timeout=None): - """Get cached entry if exists and not expired - key: which entry to get - timeout: override timeout with this value [optional] - """ - raise NotImplementedError - - def count(self): - """Get count of entries currently stored in cache""" - raise NotImplementedError - - def cleanup(self): - """Delete any expired entries in cache.""" - raise NotImplementedError - - def flush(self): - """Delete all cached entries""" - raise NotImplementedError - - -class MemoryCache(Cache): - """In-memory cache""" - - def __init__(self, timeout=60): - Cache.__init__(self, timeout) - self._entries = {} - self.lock = threading.Lock() - - def __getstate__(self): - # pickle - return {'entries': self._entries, 'timeout': self.timeout} - - def __setstate__(self, state): - # unpickle - self.lock = threading.Lock() - self._entries = state['entries'] - self.timeout = state['timeout'] - - def _is_expired(self, entry, timeout): - return timeout > 0 and (time.time() - entry[0]) >= timeout - - def store(self, key, value): - self.lock.acquire() - self._entries[key] = (time.time(), value) - self.lock.release() - - def get(self, key, timeout=None): - self.lock.acquire() - try: - # check to see if we have this key - entry = self._entries.get(key) - if not entry: - # no hit, return nothing - return None - - # use provided timeout in arguments if provided - # otherwise use the one provided during init. 
- if timeout is None: - timeout = self.timeout - - # make sure entry is not expired - if self._is_expired(entry, timeout): - # entry expired, delete and return nothing - del self._entries[key] - return None - - # entry found and not expired, return it - return entry[1] - finally: - self.lock.release() - - def count(self): - return len(self._entries) - - def cleanup(self): - self.lock.acquire() - try: - for k, v in dict(self._entries).items(): - if self._is_expired(v, self.timeout): - del self._entries[k] - finally: - self.lock.release() - - def flush(self): - self.lock.acquire() - self._entries.clear() - self.lock.release() - - -class FileCache(Cache): - """File-based cache""" - - # locks used to make cache thread-safe - cache_locks = {} - - def __init__(self, cache_dir, timeout=60): - Cache.__init__(self, timeout) - if os.path.exists(cache_dir) is False: - os.mkdir(cache_dir) - self.cache_dir = cache_dir - if cache_dir in FileCache.cache_locks: - self.lock = FileCache.cache_locks[cache_dir] - else: - self.lock = threading.Lock() - FileCache.cache_locks[cache_dir] = self.lock - - if os.name == 'posix': - self._lock_file = self._lock_file_posix - self._unlock_file = self._unlock_file_posix - elif os.name == 'nt': - self._lock_file = self._lock_file_win32 - self._unlock_file = self._unlock_file_win32 - else: - log.warning('FileCache locking not supported on this system!') - self._lock_file = self._lock_file_dummy - self._unlock_file = self._unlock_file_dummy - - def _get_path(self, key): - md5 = hashlib.md5() - md5.update(key.encode('utf-8')) - return os.path.join(self.cache_dir, md5.hexdigest()) - - def _lock_file_dummy(self, path, exclusive=True): - return None - - def _unlock_file_dummy(self, lock): - return - - def _lock_file_posix(self, path, exclusive=True): - lock_path = path + '.lock' - if exclusive is True: - f_lock = open(lock_path, 'w') - fcntl.lockf(f_lock, fcntl.LOCK_EX) - else: - f_lock = open(lock_path, 'r') - fcntl.lockf(f_lock, fcntl.LOCK_SH) - if os.path.exists(lock_path) is False: - f_lock.close() - return None - return f_lock - - def _unlock_file_posix(self, lock): - lock.close() - - def _lock_file_win32(self, path, exclusive=True): - # TODO: implement - return None - - def _unlock_file_win32(self, lock): - # TODO: implement - return - - def _delete_file(self, path): - os.remove(path) - if os.path.exists(path + '.lock'): - os.remove(path + '.lock') - - def store(self, key, value): - path = self._get_path(key) - self.lock.acquire() - try: - # acquire lock and open file - f_lock = self._lock_file(path) - datafile = open(path, 'wb') - - # write data - pickle.dump((time.time(), value), datafile) - - # close and unlock file - datafile.close() - self._unlock_file(f_lock) - finally: - self.lock.release() - - def get(self, key, timeout=None): - return self._get(self._get_path(key), timeout) - - def _get(self, path, timeout): - if os.path.exists(path) is False: - # no record - return None - self.lock.acquire() - try: - # acquire lock and open - f_lock = self._lock_file(path, False) - datafile = open(path, 'rb') - - # read pickled object - created_time, value = pickle.load(datafile) - datafile.close() - - # check if value is expired - if timeout is None: - timeout = self.timeout - if timeout > 0: - if (time.time() - created_time) >= timeout: - # expired! 
delete from cache - value = None - self._delete_file(path) - - # unlock and return result - self._unlock_file(f_lock) - return value - finally: - self.lock.release() - - def count(self): - c = 0 - for entry in os.listdir(self.cache_dir): - if entry.endswith('.lock'): - continue - c += 1 - return c - - def cleanup(self): - for entry in os.listdir(self.cache_dir): - if entry.endswith('.lock'): - continue - self._get(os.path.join(self.cache_dir, entry), None) - - def flush(self): - for entry in os.listdir(self.cache_dir): - if entry.endswith('.lock'): - continue - self._delete_file(os.path.join(self.cache_dir, entry)) - - -class MemCacheCache(Cache): - """Cache interface""" - - def __init__(self, client, timeout=60): - """Initialize the cache - client: The memcache client - timeout: number of seconds to keep a cached entry - """ - self.client = client - self.timeout = timeout - - def store(self, key, value): - """Add new record to cache - key: entry key - value: data of entry - """ - self.client.set(key, value, time=self.timeout) - - def get(self, key, timeout=None): - """Get cached entry if exists and not expired - key: which entry to get - timeout: override timeout with this value [optional]. - DOES NOT WORK HERE - """ - return self.client.get(key) - - def count(self): - """Get count of entries currently stored in cache. RETURN 0""" - raise NotImplementedError - - def cleanup(self): - """Delete any expired entries in cache. NO-OP""" - raise NotImplementedError - - def flush(self): - """Delete all cached entries. NO-OP""" - raise NotImplementedError - - -class RedisCache(Cache): - """Cache running in a redis server""" - - def __init__(self, client, - timeout=60, - keys_container='tweepy:keys', - pre_identifier='tweepy:'): - Cache.__init__(self, timeout) - self.client = client - self.keys_container = keys_container - self.pre_identifier = pre_identifier - - def _is_expired(self, entry, timeout): - # Returns true if the entry has expired - return timeout > 0 and (time.time() - entry[0]) >= timeout - - def store(self, key, value): - """Store the key, value pair in our redis server""" - # Prepend tweepy to our key, - # this makes it easier to identify tweepy keys in our redis server - key = self.pre_identifier + key - # Get a pipe (to execute several redis commands in one step) - pipe = self.client.pipeline() - # Set our values in a redis hash (similar to python dict) - pipe.set(key, pickle.dumps((time.time(), value))) - # Set the expiration - pipe.expire(key, self.timeout) - # Add the key to a set containing all the keys - pipe.sadd(self.keys_container, key) - # Execute the instructions in the redis server - pipe.execute() - - def get(self, key, timeout=None): - """Given a key, returns an element from the redis table""" - key = self.pre_identifier + key - # Check to see if we have this key - unpickled_entry = self.client.get(key) - if not unpickled_entry: - # No hit, return nothing - return None - - entry = pickle.loads(unpickled_entry) - # Use provided timeout in arguments if provided - # otherwise use the one provided during init. 
- if timeout is None: - timeout = self.timeout - - # Make sure entry is not expired - if self._is_expired(entry, timeout): - # entry expired, delete and return nothing - self.delete_entry(key) - return None - # entry found and not expired, return it - return entry[1] - - def count(self): - """Note: This is not very efficient, - since it retreives all the keys from the redis - server to know how many keys we have""" - return len(self.client.smembers(self.keys_container)) - - def delete_entry(self, key): - """Delete an object from the redis table""" - pipe = self.client.pipeline() - pipe.srem(self.keys_container, key) - pipe.delete(key) - pipe.execute() - - def cleanup(self): - """Cleanup all the expired keys""" - keys = self.client.smembers(self.keys_container) - for key in keys: - entry = self.client.get(key) - if entry: - entry = pickle.loads(entry) - if self._is_expired(entry, self.timeout): - self.delete_entry(key) - - def flush(self): - """Delete all entries from the cache""" - keys = self.client.smembers(self.keys_container) - for key in keys: - self.delete_entry(key) - - -class MongodbCache(Cache): - """A simple pickle-based MongoDB cache sytem.""" - - def __init__(self, db, timeout=3600, collection='tweepy_cache'): - """Should receive a "database" cursor from pymongo.""" - Cache.__init__(self, timeout) - self.timeout = timeout - self.col = db[collection] - self.col.create_index('created', expireAfterSeconds=timeout) - - def store(self, key, value): - from bson.binary import Binary - - now = datetime.datetime.utcnow() - blob = Binary(pickle.dumps(value)) - - self.col.insert({'created': now, '_id': key, 'value': blob}) - - def get(self, key, timeout=None): - if timeout: - raise NotImplementedError - obj = self.col.find_one({'_id': key}) - if obj: - return pickle.loads(obj['value']) - - def count(self): - return self.col.find({}).count() - - def delete_entry(self, key): - return self.col.remove({'_id': key}) - - def cleanup(self): - """MongoDB will automatically clear expired keys.""" - pass - - def flush(self): - self.col.drop() - self.col.create_index('created', expireAfterSeconds=self.timeout) diff --git a/libs/apprise/plugins/NotifyTwitter/tweepy/cursor.py b/libs/apprise/plugins/NotifyTwitter/tweepy/cursor.py deleted file mode 100644 index 3ab28c28f..000000000 --- a/libs/apprise/plugins/NotifyTwitter/tweepy/cursor.py +++ /dev/null @@ -1,214 +0,0 @@ -# Tweepy -# Copyright 2009-2010 Joshua Roesslein -# See LICENSE for details. 
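Every cache backend deleted above (MemoryCache, FileCache, MemCacheCache, RedisCache, MongodbCache) implements the same small Cache contract: store(key, value) plus get(key, timeout=None) with per-entry expiry. A self-contained sketch of that contract, mirroring only the MemoryCache behaviour visible in the diff (locking omitted; the key and values below are made up):

import time

class MiniMemoryCache:
    # Entries are (timestamp, value) pairs that expire after `timeout` seconds,
    # matching the deleted MemoryCache semantics.
    def __init__(self, timeout=60):
        self.timeout = timeout
        self._entries = {}

    def store(self, key, value):
        self._entries[key] = (time.time(), value)

    def get(self, key, timeout=None):
        timeout = self.timeout if timeout is None else timeout
        entry = self._entries.get(key)
        if entry is None:
            return None
        created, value = entry
        if timeout > 0 and (time.time() - created) >= timeout:
            del self._entries[key]   # expired: evict and report a miss
            return None
        return value

cache = MiniMemoryCache(timeout=2)
cache.store('/statuses/home_timeline.json', {'cached': True})
assert cache.get('/statuses/home_timeline.json') == {'cached': True}
time.sleep(2.5)
assert cache.get('/statuses/home_timeline.json') is None   # entry has expired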
- -from __future__ import print_function - -from .error import TweepError -from .parsers import ModelParser, RawParser - - -class Cursor(object): - """Pagination helper class""" - - def __init__(self, method, *args, **kargs): - if hasattr(method, 'pagination_mode'): - if method.pagination_mode == 'cursor': - self.iterator = CursorIterator(method, args, kargs) - elif method.pagination_mode == 'id': - self.iterator = IdIterator(method, args, kargs) - elif method.pagination_mode == 'page': - self.iterator = PageIterator(method, args, kargs) - else: - raise TweepError('Invalid pagination mode.') - else: - raise TweepError('This method does not perform pagination') - - def pages(self, limit=0): - """Return iterator for pages""" - if limit > 0: - self.iterator.limit = limit - return self.iterator - - def items(self, limit=0): - """Return iterator for items in each page""" - i = ItemIterator(self.iterator) - i.limit = limit - return i - - -class BaseIterator(object): - - def __init__(self, method, args, kargs): - self.method = method - self.args = args - self.kargs = kargs - self.limit = 0 - - def __next__(self): - return self.next() - - def next(self): - raise NotImplementedError - - def prev(self): - raise NotImplementedError - - def __iter__(self): - return self - - -class CursorIterator(BaseIterator): - - def __init__(self, method, args, kargs): - BaseIterator.__init__(self, method, args, kargs) - start_cursor = kargs.pop('cursor', None) - self.next_cursor = start_cursor or -1 - self.prev_cursor = start_cursor or 0 - self.num_tweets = 0 - - def next(self): - if self.next_cursor == 0 or (self.limit and self.num_tweets == self.limit): - raise StopIteration - data, cursors = self.method(cursor=self.next_cursor, - *self.args, - **self.kargs) - self.prev_cursor, self.next_cursor = cursors - if len(data) == 0: - raise StopIteration - self.num_tweets += 1 - return data - - def prev(self): - if self.prev_cursor == 0: - raise TweepError('Can not page back more, at first page') - data, self.next_cursor, self.prev_cursor = self.method(cursor=self.prev_cursor, - *self.args, - **self.kargs) - self.num_tweets -= 1 - return data - - -class IdIterator(BaseIterator): - - def __init__(self, method, args, kargs): - BaseIterator.__init__(self, method, args, kargs) - self.max_id = kargs.pop('max_id', None) - self.num_tweets = 0 - self.results = [] - self.model_results = [] - self.index = 0 - - def next(self): - """Fetch a set of items with IDs less than current set.""" - if self.limit and self.limit == self.num_tweets: - raise StopIteration - - if self.index >= len(self.results) - 1: - data = self.method(max_id=self.max_id, parser=RawParser(), *self.args, **self.kargs) - - if hasattr(self.method, '__self__'): - old_parser = self.method.__self__.parser - # Hack for models which expect ModelParser to be set - self.method.__self__.parser = ModelParser() - - # This is a special invocation that returns the underlying - # APIMethod class - model = ModelParser().parse(self.method(create=True), data) - if hasattr(self.method, '__self__'): - self.method.__self__.parser = old_parser - result = self.method.__self__.parser.parse(self.method(create=True), data) - else: - result = model - - if len(self.results) != 0: - self.index += 1 - self.results.append(result) - self.model_results.append(model) - else: - self.index += 1 - result = self.results[self.index] - model = self.model_results[self.index] - - if len(result) == 0: - raise StopIteration - # TODO: Make this not dependant on the parser making max_id and - # since_id 
available - self.max_id = model.max_id - self.num_tweets += 1 - return result - - def prev(self): - """Fetch a set of items with IDs greater than current set.""" - if self.limit and self.limit == self.num_tweets: - raise StopIteration - - self.index -= 1 - if self.index < 0: - # There's no way to fetch a set of tweets directly 'above' the - # current set - raise StopIteration - - data = self.results[self.index] - self.max_id = self.model_results[self.index].max_id - self.num_tweets += 1 - return data - - -class PageIterator(BaseIterator): - - def __init__(self, method, args, kargs): - BaseIterator.__init__(self, method, args, kargs) - self.current_page = 0 - - def next(self): - if self.limit > 0: - if self.current_page > self.limit: - raise StopIteration - - items = self.method(page=self.current_page, *self.args, **self.kargs) - if len(items) == 0: - raise StopIteration - self.current_page += 1 - return items - - def prev(self): - if self.current_page == 1: - raise TweepError('Can not page back more, at first page') - self.current_page -= 1 - return self.method(page=self.current_page, *self.args, **self.kargs) - - -class ItemIterator(BaseIterator): - - def __init__(self, page_iterator): - self.page_iterator = page_iterator - self.limit = 0 - self.current_page = None - self.page_index = -1 - self.num_tweets = 0 - - def next(self): - if self.limit > 0: - if self.num_tweets == self.limit: - raise StopIteration - if self.current_page is None or self.page_index == len(self.current_page) - 1: - # Reached end of current page, get the next page... - self.current_page = self.page_iterator.next() - self.page_index = -1 - self.page_index += 1 - self.num_tweets += 1 - return self.current_page[self.page_index] - - def prev(self): - if self.current_page is None: - raise TweepError('Can not go back more, at first page') - if self.page_index == 0: - # At the beginning of the current page, move to next... - self.current_page = self.page_iterator.prev() - self.page_index = len(self.current_page) - if self.page_index == 0: - raise TweepError('No more items') - self.page_index -= 1 - self.num_tweets -= 1 - return self.current_page[self.page_index] diff --git a/libs/apprise/plugins/NotifyTwitter/tweepy/error.py b/libs/apprise/plugins/NotifyTwitter/tweepy/error.py deleted file mode 100644 index f7d589445..000000000 --- a/libs/apprise/plugins/NotifyTwitter/tweepy/error.py +++ /dev/null @@ -1,34 +0,0 @@ -# Tweepy -# Copyright 2009-2010 Joshua Roesslein -# See LICENSE for details. - -from __future__ import print_function - -import six - -class TweepError(Exception): - """Tweepy exception""" - - def __init__(self, reason, response=None, api_code=None): - self.reason = six.text_type(reason) - self.response = response - self.api_code = api_code - Exception.__init__(self, reason) - - def __str__(self): - return self.reason - - -def is_rate_limit_error_message(message): - """Check if the supplied error message belongs to a rate limit error.""" - return isinstance(message, list) \ - and len(message) > 0 \ - and 'code' in message[0] \ - and message[0]['code'] == 88 - - -class RateLimitError(TweepError): - """Exception for Tweepy hitting the rate limit.""" - # RateLimitError has the exact same properties and inner workings - # as TweepError for backwards compatibility reasons. 
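RateLimitError, whose definition closes just below, deliberately adds nothing to TweepError, so existing "except TweepError" handlers keep catching it while newer code can match the rate-limit case specifically. A self-contained sketch of that pattern (the class shapes follow the diff; the fetch() call site is hypothetical):

class TweepError(Exception):
    # Carries the parsed reason plus optional HTTP response and API error code.
    def __init__(self, reason, response=None, api_code=None):
        self.reason = str(reason)
        self.response = response
        self.api_code = api_code
        Exception.__init__(self, reason)

class RateLimitError(TweepError):
    # Same attributes and behaviour as TweepError; only the type differs.
    pass

def fetch():
    # Hypothetical call site: pretend the API answered with error code 88.
    raise RateLimitError('Rate limit exceeded', api_code=88)

try:
    fetch()
except RateLimitError:
    print('rate limited: back off and retry later')   # specific handler first
except TweepError as exc:
    print('other Twitter error:', exc.reason)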
- pass diff --git a/libs/apprise/plugins/NotifyTwitter/tweepy/models.py b/libs/apprise/plugins/NotifyTwitter/tweepy/models.py deleted file mode 100644 index 4b2a66c59..000000000 --- a/libs/apprise/plugins/NotifyTwitter/tweepy/models.py +++ /dev/null @@ -1,495 +0,0 @@ -# Tweepy -# Copyright 2009-2010 Joshua Roesslein -# See LICENSE for details. - -from __future__ import absolute_import, print_function - -from .utils import parse_datetime, parse_html_value, parse_a_href - - -class ResultSet(list): - """A list like object that holds results from a Twitter API query.""" - def __init__(self, max_id=None, since_id=None): - super(ResultSet, self).__init__() - self._max_id = max_id - self._since_id = since_id - - @property - def max_id(self): - if self._max_id: - return self._max_id - ids = self.ids() - # Max_id is always set to the *smallest* id, minus one, in the set - return (min(ids) - 1) if ids else None - - @property - def since_id(self): - if self._since_id: - return self._since_id - ids = self.ids() - # Since_id is always set to the *greatest* id in the set - return max(ids) if ids else None - - def ids(self): - return [item.id for item in self if hasattr(item, 'id')] - - -class Model(object): - - def __init__(self, api=None): - self._api = api - - def __getstate__(self): - # pickle - pickle = dict(self.__dict__) - try: - del pickle['_api'] # do not pickle the API reference - except KeyError: - pass - return pickle - - @classmethod - def parse(cls, api, json): - """Parse a JSON object into a model instance.""" - raise NotImplementedError - - @classmethod - def parse_list(cls, api, json_list): - """ - Parse a list of JSON objects into - a result set of model instances. - """ - results = ResultSet() - for obj in json_list: - if obj: - results.append(cls.parse(api, obj)) - return results - - def __repr__(self): - state = ['%s=%s' % (k, repr(v)) for (k, v) in vars(self).items()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(state)) - - -class Status(Model): - - @classmethod - def parse(cls, api, json): - status = cls(api) - setattr(status, '_json', json) - for k, v in json.items(): - if k == 'user': - user_model = getattr(api.parser.model_factory, 'user') if api else User - user = user_model.parse(api, v) - setattr(status, 'author', user) - setattr(status, 'user', user) # DEPRECIATED - elif k == 'created_at': - setattr(status, k, parse_datetime(v)) - elif k == 'source': - if '<' in v: - setattr(status, k, parse_html_value(v)) - setattr(status, 'source_url', parse_a_href(v)) - else: - setattr(status, k, v) - setattr(status, 'source_url', None) - elif k == 'retweeted_status': - setattr(status, k, Status.parse(api, v)) - elif k == 'quoted_status': - setattr(status, k, Status.parse(api, v)) - elif k == 'place': - if v is not None: - setattr(status, k, Place.parse(api, v)) - else: - setattr(status, k, None) - else: - setattr(status, k, v) - return status - - def destroy(self): - return self._api.destroy_status(self.id) - - def retweet(self): - return self._api.retweet(self.id) - - def retweets(self): - return self._api.retweets(self.id) - - def favorite(self): - return self._api.create_favorite(self.id) - - def __eq__(self, other): - if isinstance(other, Status): - return self.id == other.id - - return NotImplemented - - def __ne__(self, other): - result = self == other - - if result is NotImplemented: - return result - - return not result - - -class User(Model): - - @classmethod - def parse(cls, api, json): - user = cls(api) - setattr(user, '_json', json) - for k, v in json.items(): - if 
k == 'created_at': - setattr(user, k, parse_datetime(v)) - elif k == 'status': - setattr(user, k, Status.parse(api, v)) - elif k == 'following': - # twitter sets this to null if it is false - if v is True: - setattr(user, k, True) - else: - setattr(user, k, False) - else: - setattr(user, k, v) - return user - - @classmethod - def parse_list(cls, api, json_list): - if isinstance(json_list, list): - item_list = json_list - else: - item_list = json_list['users'] - - results = ResultSet() - for obj in item_list: - results.append(cls.parse(api, obj)) - return results - - def timeline(self, **kargs): - return self._api.user_timeline(user_id=self.id, **kargs) - - def friends(self, **kargs): - return self._api.friends(user_id=self.id, **kargs) - - def followers(self, **kargs): - return self._api.followers(user_id=self.id, **kargs) - - def follow(self): - self._api.create_friendship(user_id=self.id) - self.following = True - - def unfollow(self): - self._api.destroy_friendship(user_id=self.id) - self.following = False - - def lists_memberships(self, *args, **kargs): - return self._api.lists_memberships(user=self.screen_name, - *args, - **kargs) - - def lists_subscriptions(self, *args, **kargs): - return self._api.lists_subscriptions(user=self.screen_name, - *args, - **kargs) - - def lists(self, *args, **kargs): - return self._api.lists_all(user=self.screen_name, - *args, - **kargs) - - def followers_ids(self, *args, **kargs): - return self._api.followers_ids(user_id=self.id, - *args, - **kargs) - - -class DirectMessage(Model): - - @classmethod - def parse(cls, api, json): - dm = cls(api) - for k, v in json.items(): - if k == 'sender' or k == 'recipient': - setattr(dm, k, User.parse(api, v)) - elif k == 'created_at': - setattr(dm, k, parse_datetime(v)) - else: - setattr(dm, k, v) - return dm - - def destroy(self): - return self._api.destroy_direct_message(self.id) - - -class Friendship(Model): - - @classmethod - def parse(cls, api, json): - relationship = json['relationship'] - - # parse source - source = cls(api) - for k, v in relationship['source'].items(): - setattr(source, k, v) - - # parse target - target = cls(api) - for k, v in relationship['target'].items(): - setattr(target, k, v) - - return source, target - - -class Category(Model): - - @classmethod - def parse(cls, api, json): - category = cls(api) - for k, v in json.items(): - setattr(category, k, v) - return category - - -class SavedSearch(Model): - - @classmethod - def parse(cls, api, json): - ss = cls(api) - for k, v in json.items(): - if k == 'created_at': - setattr(ss, k, parse_datetime(v)) - else: - setattr(ss, k, v) - return ss - - def destroy(self): - return self._api.destroy_saved_search(self.id) - - -class SearchResults(ResultSet): - - @classmethod - def parse(cls, api, json): - metadata = json['search_metadata'] - results = SearchResults() - results.refresh_url = metadata.get('refresh_url') - results.completed_in = metadata.get('completed_in') - results.query = metadata.get('query') - results.count = metadata.get('count') - results.next_results = metadata.get('next_results') - - status_model = getattr(api.parser.model_factory, 'status') if api else Status - - for status in json['statuses']: - results.append(status_model.parse(api, status)) - return results - - -class List(Model): - - @classmethod - def parse(cls, api, json): - lst = List(api) - for k, v in json.items(): - if k == 'user': - setattr(lst, k, User.parse(api, v)) - elif k == 'created_at': - setattr(lst, k, parse_datetime(v)) - else: - setattr(lst, k, v) - return 
lst - - @classmethod - def parse_list(cls, api, json_list, result_set=None): - results = ResultSet() - if isinstance(json_list, dict): - json_list = json_list['lists'] - for obj in json_list: - results.append(cls.parse(api, obj)) - return results - - def update(self, **kargs): - return self._api.update_list(self.slug, **kargs) - - def destroy(self): - return self._api.destroy_list(self.slug) - - def timeline(self, **kargs): - return self._api.list_timeline(self.user.screen_name, - self.slug, - **kargs) - - def add_member(self, id): - return self._api.add_list_member(self.slug, id) - - def remove_member(self, id): - return self._api.remove_list_member(self.slug, id) - - def members(self, **kargs): - return self._api.list_members(self.user.screen_name, - self.slug, - **kargs) - - def is_member(self, id): - return self._api.is_list_member(self.user.screen_name, - self.slug, - id) - - def subscribe(self): - return self._api.subscribe_list(self.user.screen_name, self.slug) - - def unsubscribe(self): - return self._api.unsubscribe_list(self.user.screen_name, self.slug) - - def subscribers(self, **kargs): - return self._api.list_subscribers(self.user.screen_name, - self.slug, - **kargs) - - def is_subscribed(self, id): - return self._api.is_subscribed_list(self.user.screen_name, - self.slug, - id) - - -class Relation(Model): - @classmethod - def parse(cls, api, json): - result = cls(api) - for k, v in json.items(): - if k == 'value' and json['kind'] in ['Tweet', 'LookedupStatus']: - setattr(result, k, Status.parse(api, v)) - elif k == 'results': - setattr(result, k, Relation.parse_list(api, v)) - else: - setattr(result, k, v) - return result - - -class Relationship(Model): - @classmethod - def parse(cls, api, json): - result = cls(api) - for k, v in json.items(): - if k == 'connections': - setattr(result, 'is_following', 'following' in v) - setattr(result, 'is_followed_by', 'followed_by' in v) - else: - setattr(result, k, v) - return result - - -class JSONModel(Model): - - @classmethod - def parse(cls, api, json): - return json - - -class IDModel(Model): - - @classmethod - def parse(cls, api, json): - if isinstance(json, list): - return json - else: - return json['ids'] - - -class BoundingBox(Model): - - @classmethod - def parse(cls, api, json): - result = cls(api) - if json is not None: - for k, v in json.items(): - setattr(result, k, v) - return result - - def origin(self): - """ - Return longitude, latitude of southwest (bottom, left) corner of - bounding box, as a tuple. - - This assumes that bounding box is always a rectangle, which - appears to be the case at present. - """ - return tuple(self.coordinates[0][0]) - - def corner(self): - """ - Return longitude, latitude of northeast (top, right) corner of - bounding box, as a tuple. - - This assumes that bounding box is always a rectangle, which - appears to be the case at present. - """ - return tuple(self.coordinates[0][2]) - - -class Place(Model): - - @classmethod - def parse(cls, api, json): - place = cls(api) - for k, v in json.items(): - if k == 'bounding_box': - # bounding_box value may be null (None.) - # Example: "United States" (id=96683cc9126741d1) - if v is not None: - t = BoundingBox.parse(api, v) - else: - t = v - setattr(place, k, t) - elif k == 'contained_within': - # contained_within is a list of Places. 
- setattr(place, k, Place.parse_list(api, v)) - else: - setattr(place, k, v) - return place - - @classmethod - def parse_list(cls, api, json_list): - if isinstance(json_list, list): - item_list = json_list - else: - item_list = json_list['result']['places'] - - results = ResultSet() - for obj in item_list: - results.append(cls.parse(api, obj)) - return results - - -class Media(Model): - - @classmethod - def parse(cls, api, json): - media = cls(api) - for k, v in json.items(): - setattr(media, k, v) - return media - - -class ModelFactory(object): - """ - Used by parsers for creating instances - of models. You may subclass this factory - to add your own extended models. - """ - - status = Status - user = User - direct_message = DirectMessage - friendship = Friendship - saved_search = SavedSearch - search_results = SearchResults - category = Category - list = List - relation = Relation - relationship = Relationship - media = Media - - json = JSONModel - ids = IDModel - place = Place - bounding_box = BoundingBox diff --git a/libs/apprise/plugins/NotifyTwitter/tweepy/parsers.py b/libs/apprise/plugins/NotifyTwitter/tweepy/parsers.py deleted file mode 100644 index 371ad5f9e..000000000 --- a/libs/apprise/plugins/NotifyTwitter/tweepy/parsers.py +++ /dev/null @@ -1,109 +0,0 @@ -# Tweepy -# Copyright 2009-2010 Joshua Roesslein -# See LICENSE for details. - -from __future__ import print_function - -from .models import ModelFactory -from .utils import import_simplejson -from .error import TweepError - - -class Parser(object): - - def parse(self, method, payload): - """ - Parse the response payload and return the result. - Returns a tuple that contains the result data and the cursors - (or None if not present). - """ - raise NotImplementedError - - def parse_error(self, payload): - """ - Parse the error message and api error code from payload. - Return them as an (error_msg, error_code) tuple. If unable to parse the - message, throw an exception and default error message will be used. 
- """ - raise NotImplementedError - - -class RawParser(Parser): - - def __init__(self): - pass - - def parse(self, method, payload): - return payload - - def parse_error(self, payload): - return payload - - -class JSONParser(Parser): - - payload_format = 'json' - - def __init__(self): - self.json_lib = import_simplejson() - - def parse(self, method, payload): - try: - json = self.json_lib.loads(payload) - except Exception as e: - raise TweepError('Failed to parse JSON payload: %s' % e) - - needs_cursors = 'cursor' in method.session.params - if needs_cursors and isinstance(json, dict) \ - and 'previous_cursor' in json \ - and 'next_cursor' in json: - cursors = json['previous_cursor'], json['next_cursor'] - return json, cursors - else: - return json - - def parse_error(self, payload): - error_object = self.json_lib.loads(payload) - - if 'error' in error_object: - reason = error_object['error'] - api_code = error_object.get('code') - else: - reason = error_object['errors'] - api_code = [error.get('code') for error in - reason if error.get('code')] - api_code = api_code[0] if len(api_code) == 1 else api_code - - return reason, api_code - - -class ModelParser(JSONParser): - - def __init__(self, model_factory=None): - JSONParser.__init__(self) - self.model_factory = model_factory or ModelFactory - - def parse(self, method, payload): - try: - if method.payload_type is None: - return - model = getattr(self.model_factory, method.payload_type) - except AttributeError: - raise TweepError('No model for this payload type: ' - '%s' % method.payload_type) - - json = JSONParser.parse(self, method, payload) - if isinstance(json, tuple): - json, cursors = json - else: - cursors = None - - if method.payload_list: - result = model.parse_list(method.api, json) - else: - result = model.parse(method.api, json) - - if cursors: - return result, cursors - else: - return result diff --git a/libs/apprise/plugins/NotifyTwitter/tweepy/streaming.py b/libs/apprise/plugins/NotifyTwitter/tweepy/streaming.py deleted file mode 100644 index c0961467e..000000000 --- a/libs/apprise/plugins/NotifyTwitter/tweepy/streaming.py +++ /dev/null @@ -1,476 +0,0 @@ -# Tweepy -# Copyright 2009-2010 Joshua Roesslein -# See LICENSE for details. - -# Appengine users: https://developers.google.com/appengine/docs/python/sockets/#making_httplib_use_sockets - -from __future__ import absolute_import, print_function - -import logging -import re -import requests -import sys -from requests.exceptions import Timeout -from threading import Thread -from time import sleep - -import six - -import ssl - -from .models import Status -from .api import API -from .error import TweepError - -from .utils import import_simplejson -json = import_simplejson() - -STREAM_VERSION = '1.1' - - -class StreamListener(object): - - def __init__(self, api=None): - self.api = api or API() - - def on_connect(self): - """Called once connected to streaming server. - - This will be invoked once a successful response - is received from the server. Allows the listener - to perform some work prior to entering the read loop. - """ - pass - - def on_data(self, raw_data): - """Called when raw data is received from connection. - - Override this method if you wish to manually handle - the stream data. Return False to stop stream and close connection. 
- """ - data = json.loads(raw_data) - - if 'in_reply_to_status_id' in data: - status = Status.parse(self.api, data) - if self.on_status(status) is False: - return False - elif 'delete' in data: - delete = data['delete']['status'] - if self.on_delete(delete['id'], delete['user_id']) is False: - return False - elif 'event' in data: - status = Status.parse(self.api, data) - if self.on_event(status) is False: - return False - elif 'direct_message' in data: - status = Status.parse(self.api, data) - if self.on_direct_message(status) is False: - return False - elif 'friends' in data: - if self.on_friends(data['friends']) is False: - return False - elif 'limit' in data: - if self.on_limit(data['limit']['track']) is False: - return False - elif 'disconnect' in data: - if self.on_disconnect(data['disconnect']) is False: - return False - elif 'warning' in data: - if self.on_warning(data['warning']) is False: - return False - else: - logging.error("Unknown message type: " + str(raw_data)) - - def keep_alive(self): - """Called when a keep-alive arrived""" - return - - def on_status(self, status): - """Called when a new status arrives""" - return - - def on_exception(self, exception): - """Called when an unhandled exception occurs.""" - return - - def on_delete(self, status_id, user_id): - """Called when a delete notice arrives for a status""" - return - - def on_event(self, status): - """Called when a new event arrives""" - return - - def on_direct_message(self, status): - """Called when a new direct message arrives""" - return - - def on_friends(self, friends): - """Called when a friends list arrives. - - friends is a list that contains user_id - """ - return - - def on_limit(self, track): - """Called when a limitation notice arrives""" - return - - def on_error(self, status_code): - """Called when a non-200 status code is returned""" - return False - - def on_timeout(self): - """Called when stream connection times out""" - return - - def on_disconnect(self, notice): - """Called when twitter sends a disconnect notice - - Disconnect codes are listed here: - https://dev.twitter.com/docs/streaming-apis/messages#Disconnect_messages_disconnect - """ - return - - def on_warning(self, notice): - """Called when a disconnection warning message arrives""" - return - - -class ReadBuffer(object): - """Buffer data from the response in a smarter way than httplib/requests can. - - Tweets are roughly in the 2-12kb range, averaging around 3kb. - Requests/urllib3/httplib/socket all use socket.read, which blocks - until enough data is returned. On some systems (eg google appengine), socket - reads are quite slow. To combat this latency we can read big chunks, - but the blocking part means we won't get results until enough tweets - have arrived. That may not be a big deal for high throughput systems. - For low throughput systems we don't want to sacrafice latency, so we - use small chunks so it can read the length and the tweet in 2 read calls. - """ - - def __init__(self, stream, chunk_size, encoding='utf-8'): - self._stream = stream - self._buffer = six.b('') - self._chunk_size = chunk_size - self._encoding = encoding - - def read_len(self, length): - while not self._stream.closed: - if len(self._buffer) >= length: - return self._pop(length) - read_len = max(self._chunk_size, length - len(self._buffer)) - self._buffer += self._stream.read(read_len) - return six.b('') - - def read_line(self, sep=six.b('\n')): - """Read the data stream until a given separator is found (default \n) - - :param sep: Separator to read until. 
Must by of the bytes type (str in python 2, - bytes in python 3) - :return: The str of the data read until sep - """ - start = 0 - while not self._stream.closed: - loc = self._buffer.find(sep, start) - if loc >= 0: - return self._pop(loc + len(sep)) - else: - start = len(self._buffer) - self._buffer += self._stream.read(self._chunk_size) - return six.b('') - - def _pop(self, length): - r = self._buffer[:length] - self._buffer = self._buffer[length:] - return r.decode(self._encoding) - - -class Stream(object): - - host = 'stream.twitter.com' - - def __init__(self, auth, listener, **options): - self.auth = auth - self.listener = listener - self.running = False - self.timeout = options.get("timeout", 300.0) - self.retry_count = options.get("retry_count") - # values according to - # https://dev.twitter.com/docs/streaming-apis/connecting#Reconnecting - self.retry_time_start = options.get("retry_time", 5.0) - self.retry_420_start = options.get("retry_420", 60.0) - self.retry_time_cap = options.get("retry_time_cap", 320.0) - self.snooze_time_step = options.get("snooze_time", 0.25) - self.snooze_time_cap = options.get("snooze_time_cap", 16) - - # The default socket.read size. Default to less than half the size of - # a tweet so that it reads tweets with the minimal latency of 2 reads - # per tweet. Values higher than ~1kb will increase latency by waiting - # for more data to arrive but may also increase throughput by doing - # fewer socket read calls. - self.chunk_size = options.get("chunk_size", 512) - - self.verify = options.get("verify", True) - - self.api = API() - self.headers = options.get("headers") or {} - self.new_session() - self.body = None - self.retry_time = self.retry_time_start - self.snooze_time = self.snooze_time_step - - # Example: proxies = {'http': 'http://localhost:1080', 'https': 'http://localhost:1080'} - self.proxies = options.get("proxies") - - def new_session(self): - self.session = requests.Session() - self.session.headers = self.headers - self.session.params = None - - def _run(self): - # Authenticate - url = "https://%s%s" % (self.host, self.url) - - # Connect and process the stream - error_counter = 0 - resp = None - exc_info = None - while self.running: - if self.retry_count is not None: - if error_counter > self.retry_count: - # quit if error count greater than retry count - break - try: - auth = self.auth.apply_auth() - resp = self.session.request('POST', - url, - data=self.body, - timeout=self.timeout, - stream=True, - auth=auth, - verify=self.verify, - proxies = self.proxies) - if resp.status_code != 200: - if self.listener.on_error(resp.status_code) is False: - break - error_counter += 1 - if resp.status_code == 420: - self.retry_time = max(self.retry_420_start, - self.retry_time) - sleep(self.retry_time) - self.retry_time = min(self.retry_time * 2, - self.retry_time_cap) - else: - error_counter = 0 - self.retry_time = self.retry_time_start - self.snooze_time = self.snooze_time_step - self.listener.on_connect() - self._read_loop(resp) - except (Timeout, ssl.SSLError) as exc: - # This is still necessary, as a SSLError can actually be - # thrown when using Requests - # If it's not time out treat it like any other exception - if isinstance(exc, ssl.SSLError): - if not (exc.args and 'timed out' in str(exc.args[0])): - exc_info = sys.exc_info() - break - if self.listener.on_timeout() is False: - break - if self.running is False: - break - sleep(self.snooze_time) - self.snooze_time = min(self.snooze_time + self.snooze_time_step, - self.snooze_time_cap) - except 
Exception as exc: - exc_info = sys.exc_info() - # any other exception is fatal, so kill loop - break - - # cleanup - self.running = False - if resp: - resp.close() - - self.new_session() - - if exc_info: - # call a handler first so that the exception can be logged. - self.listener.on_exception(exc_info[1]) - six.reraise(*exc_info) - - def _data(self, data): - if self.listener.on_data(data) is False: - self.running = False - - def _read_loop(self, resp): - charset = resp.headers.get('content-type', default='') - enc_search = re.search(r'charset=(?P<enc>\S*)', charset) - if enc_search is not None: - encoding = enc_search.group('enc') - else: - encoding = 'utf-8' - - buf = ReadBuffer(resp.raw, self.chunk_size, encoding=encoding) - - while self.running and not resp.raw.closed: - length = 0 - while not resp.raw.closed: - line = buf.read_line() - stripped_line = line.strip() if line else line # line is sometimes None so we need to check here - if not stripped_line: - self.listener.keep_alive() # keep-alive new lines are expected - elif stripped_line.isdigit(): - length = int(stripped_line) - break - else: - raise TweepError('Expecting length, unexpected value found') - - next_status_obj = buf.read_len(length) - if self.running and next_status_obj: - self._data(next_status_obj) - - # # Note: keep-alive newlines might be inserted before each length value. - # # read until we get a digit... - # c = b'\n' - # for c in resp.iter_content(decode_unicode=True): - # if c == b'\n': - # continue - # break - # - # delimited_string = c - # - # # read rest of delimiter length.. - # d = b'' - # for d in resp.iter_content(decode_unicode=True): - # if d != b'\n': - # delimited_string += d - # continue - # break - # - # # read the next twitter status object - # if delimited_string.decode('utf-8').strip().isdigit(): - # status_id = int(delimited_string) - # next_status_obj = resp.raw.read(status_id) - # if self.running: - # self._data(next_status_obj.decode('utf-8')) - - - if resp.raw.closed: - self.on_closed(resp) - - def _start(self, is_async): - self.running = True - if is_async: - self._thread = Thread(target=self._run) - self._thread.start() - else: - self._run() - - def on_closed(self, resp): - """ Called when the response has been closed by Twitter """ - pass - - def userstream(self, - stall_warnings=False, - _with=None, - replies=None, - track=None, - locations=None, - is_async=False, - encoding='utf8'): - self.session.params = {'delimited': 'length'} - if self.running: - raise TweepError('Stream object already connected!') - self.url = '/%s/user.json' % STREAM_VERSION - self.host = 'userstream.twitter.com' - if stall_warnings: - self.session.params['stall_warnings'] = stall_warnings - if _with: - self.session.params['with'] = _with - if replies: - self.session.params['replies'] = replies - if locations and len(locations) > 0: - if len(locations) % 4 != 0: - raise TweepError("Wrong number of locations points, " - "it has to be a multiple of 4") - self.session.params['locations'] = ','.join(['%.2f' % l for l in locations]) - if track: - self.session.params['track'] = u','.join(track).encode(encoding) - - self._start(is_async) - - def firehose(self, count=None, is_async=False): - self.session.params = {'delimited': 'length'} - if self.running: - raise TweepError('Stream object already connected!') - self.url = '/%s/statuses/firehose.json' % STREAM_VERSION - if count: - self.url += '&count=%s' % count - self._start(is_async) - - def retweet(self, is_async=False): - self.session.params = {'delimited': 
'length'} - if self.running: - raise TweepError('Stream object already connected!') - self.url = '/%s/statuses/retweet.json' % STREAM_VERSION - self._start(is_async) - - def sample(self, is_async=False, languages=None, stall_warnings=False): - self.session.params = {'delimited': 'length'} - if self.running: - raise TweepError('Stream object already connected!') - self.url = '/%s/statuses/sample.json' % STREAM_VERSION - if languages: - self.session.params['language'] = ','.join(map(str, languages)) - if stall_warnings: - self.session.params['stall_warnings'] = 'true' - self._start(is_async) - - def filter(self, follow=None, track=None, is_async=False, locations=None, - stall_warnings=False, languages=None, encoding='utf8', filter_level=None): - self.body = {} - self.session.headers['Content-type'] = "application/x-www-form-urlencoded" - if self.running: - raise TweepError('Stream object already connected!') - self.url = '/%s/statuses/filter.json' % STREAM_VERSION - if follow: - self.body['follow'] = u','.join(follow).encode(encoding) - if track: - self.body['track'] = u','.join(track).encode(encoding) - if locations and len(locations) > 0: - if len(locations) % 4 != 0: - raise TweepError("Wrong number of locations points, " - "it has to be a multiple of 4") - self.body['locations'] = u','.join(['%.4f' % l for l in locations]) - if stall_warnings: - self.body['stall_warnings'] = stall_warnings - if languages: - self.body['language'] = u','.join(map(str, languages)) - if filter_level: - self.body['filter_level'] = filter_level.encode(encoding) - self.session.params = {'delimited': 'length'} - self.host = 'stream.twitter.com' - self._start(is_async) - - def sitestream(self, follow, stall_warnings=False, - with_='user', replies=False, is_async=False): - self.body = {} - if self.running: - raise TweepError('Stream object already connected!') - self.url = '/%s/site.json' % STREAM_VERSION - self.body['follow'] = u','.join(map(six.text_type, follow)) - self.body['delimited'] = 'length' - if stall_warnings: - self.body['stall_warnings'] = stall_warnings - if with_: - self.body['with'] = with_ - if replies: - self.body['replies'] = replies - self._start(is_async) - - def disconnect(self): - if self.running is False: - return - self.running = False diff --git a/libs/apprise/plugins/NotifyTwitter/tweepy/utils.py b/libs/apprise/plugins/NotifyTwitter/tweepy/utils.py deleted file mode 100644 index e3843a73d..000000000 --- a/libs/apprise/plugins/NotifyTwitter/tweepy/utils.py +++ /dev/null @@ -1,50 +0,0 @@ -# Tweepy -# Copyright 2010 Joshua Roesslein -# See LICENSE for details. 
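On the body construction in Stream.filter() above: track terms are comma-joined and encoded, while locations must be a flat list whose length is a multiple of 4 (Twitter treats each group of four values as one bounding box) and is rendered with four decimal places. A standalone sketch of just that encoding step, with invented sample values:

    def build_filter_body(track=None, locations=None, encoding='utf8'):
        # Mirrors the track/locations handling in Stream.filter(); note the
        # original encodes track to bytes but leaves locations as text.
        body = {}
        if track:
            body['track'] = u','.join(track).encode(encoding)
        if locations:
            if len(locations) % 4 != 0:
                raise ValueError("Wrong number of locations points, "
                                 "it has to be a multiple of 4")
            body['locations'] = u','.join(['%.4f' % l for l in locations])
        return body

    body = build_filter_body(track=['python', 'apprise'],
                             locations=[-122.75, 36.8, -121.75, 37.8])
    print(body['track'])       # b'python,apprise' (Python 3 repr)
    print(body['locations'])   # -122.7500,36.8000,-121.7500,37.8000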
- -from __future__ import print_function - -from datetime import datetime - -import six - -from email.utils import parsedate - - -def parse_datetime(string): - return datetime(*(parsedate(string)[:6])) - - -def parse_html_value(html): - - return html[html.find('>')+1:html.rfind('<')] - - -def parse_a_href(atag): - - start = atag.find('"') + 1 - end = atag.find('"', start) - return atag[start:end] - - -def convert_to_utf8_str(arg): - # written by Michael Norton (http://docondev.blogspot.com/) - if isinstance(arg, six.text_type): - arg = arg.encode('utf-8') - elif not isinstance(arg, bytes): - arg = six.text_type(arg).encode('utf-8') - return arg - - -def import_simplejson(): - try: - import simplejson as json - except ImportError: - import json - - return json - - -def list_to_csv(item_list): - if item_list: - return ','.join([str(i) for i in item_list]) diff --git a/libs/apprise/plugins/__init__.py b/libs/apprise/plugins/__init__.py index d441494c6..ec6780a6a 100644 --- a/libs/apprise/plugins/__init__.py +++ b/libs/apprise/plugins/__init__.py @@ -37,7 +37,6 @@ from . import NotifyEmail as NotifyEmailBase # Required until re-factored into base code from .NotifyPushjet import pushjet from .NotifyGrowl import gntp -from .NotifyTwitter import tweepy # NotifyBase object is passed in as a module not class from . import NotifyBase @@ -66,9 +65,6 @@ __all__ = [ # pushjet (used for NotifyPushjet Testing) 'pushjet', - - # tweepy (used for NotifyTwitter Testing) - 'tweepy', ] # we mirror our base purely for the ability to reset everything; this diff --git a/libs/apprise/utils.py b/libs/apprise/utils.py index ef8c840f5..0dcc6cb00 100644 --- a/libs/apprise/utils.py +++ b/libs/apprise/utils.py @@ -42,12 +42,16 @@ except ImportError: from urllib.parse import urlparse # URL Indexing Table for returns via parse_url() +# The below accepts and scans for: +# - schema:// +# - schema://path +# - schema://path?kwargs +# VALID_URL_RE = re.compile( - r'^[\s]*(?P<schema>[^:\s]+):[/\\]*(?P<path>[^?]+)' - r'(\?(?P<kwargs>.+))?[\s]*$', + r'^[\s]*((?P<schema>[^:\s]+):[/\\]+)?((?P<path>[^?]+)' + r'(\?(?P<kwargs>.+))?)?[\s]*$', ) -VALID_HOST_RE = re.compile(r'^[\s]*(?P<path>[^?\s]+)(\?(?P<kwargs>.+))?') -VALID_QUERY_RE = re.compile(r'^(?P<path>.*[/\\])(?P<query>[^/\\]*)$') +VALID_QUERY_RE = re.compile(r'^(?P<path>.*[/\\])(?P<query>[^/\\]+)?$') # delimiters used to separate values when content is passed in by string. # This is useful when turning a string into a list @@ -100,10 +104,10 @@ GET_SCHEMA_RE = re.compile(r'\s*(?P<schema>[a-z0-9]{2,9})://.*$', re.I) # Regular expression based and expanded from: # http://www.regular-expressions.info/email.html GET_EMAIL_RE = re.compile( - r"((?P<label>[^+]+)\+)?" + r"(?P<fulluser>((?P<label>[^+]+)\+)?" 
r"(?P<userid>[a-z0-9$%=_~-]+" r"(?:\.[a-z0-9$%+=_~-]+)" - r"*)@(?P<domain>(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+" + r"*))@(?P<domain>(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+" r"[a-z0-9](?:[a-z0-9-]*" r"[a-z0-9]))?", re.IGNORECASE, @@ -251,6 +255,7 @@ def parse_url(url, default_schema='http', verify_host=True): <schema>://<host>:<port>/<path> <schema>://<host>/<path> <schema>://<host> + <host> Argument parsing is also supported: <schema>://<user>@<host>:<port>/<path>?key1=val&key2=val2 @@ -277,7 +282,7 @@ def parse_url(url, default_schema='http', verify_host=True): # The port (if specified) 'port': None, # The hostname - 'host': None, + 'host': '', # The full path (query + path) 'fullpath': None, # The path @@ -304,41 +309,30 @@ def parse_url(url, default_schema='http', verify_host=True): qsdata = '' match = VALID_URL_RE.search(url) if match: - # Extract basic results - result['schema'] = match.group('schema').lower().strip() - host = match.group('path').strip() - try: - qsdata = match.group('kwargs').strip() - except AttributeError: - # No qsdata - pass + # Extract basic results (with schema present) + result['schema'] = match.group('schema').lower().strip() \ + if match.group('schema') else default_schema + host = match.group('path').strip() \ + if match.group('path') else '' + qsdata = match.group('kwargs').strip() \ + if match.group('kwargs') else None else: - match = VALID_HOST_RE.search(url) - if not match: - return None - result['schema'] = default_schema - host = match.group('path').strip() - try: - qsdata = match.group('kwargs').strip() - except AttributeError: - # No qsdata - pass + # Could not extract basic content from the URL + return None # Parse Query Arugments ?val=key&key=val # while ensuring that all keys are lowercase if qsdata: result.update(parse_qsd(qsdata)) - # Now do a proper extraction of data + # Now do a proper extraction of data; http:// is just substitued in place + # to allow urlparse() to function as expected, we'll swap this back to the + # expected schema after. parsed = urlparse('http://%s' % host) # Parse results result['host'] = parsed[1].strip() - if not result['host']: - # Nothing more we can do without a hostname - return None - result['fullpath'] = quote(unquote(tidy_path(parsed[2].strip()))) try: @@ -359,11 +353,10 @@ def parse_url(url, default_schema='http', verify_host=True): else: # Using full path, extract query from path match = VALID_QUERY_RE.search(result['fullpath']) - if match: - result['path'] = match.group('path') - result['query'] = match.group('query') - if not result['query']: - result['query'] = None + result['path'] = match.group('path') + result['query'] = match.group('query') + if not result['query']: + result['query'] = None try: (result['user'], result['host']) = \ re.split(r'[@]+', result['host'])[:2] |