Hello community,

Here is the log from the commit of package python-python-twitter for
openSUSE:Factory, checked in at 2018-06-29 22:27:21.
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-python-twitter (Old)
 and      /work/SRC/openSUSE:Factory/.python-python-twitter.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-python-twitter"

Fri Jun 29 22:27:21 2018 rev:2 rq:617157 version:3.4.2

Changes:
--------
--- 
/work/SRC/openSUSE:Factory/python-python-twitter/python-python-twitter.changes  
    2018-03-05 13:37:51.639936735 +0100
+++ 
/work/SRC/openSUSE:Factory/.python-python-twitter.new/python-python-twitter.changes
 2018-06-29 22:27:22.222439601 +0200
@@ -1,0 +2,13 @@
+Sun Jun 10 20:01:43 UTC 2018 - [email protected]
+
+- update to version 3.4.2:
+ * Bugfixes:
+  * Allow upload of GIFs with size up to 15mb. See #538
+- update to version 3.4.1:
+ * Bugfixes:
+  * Fix an issue where 
:py:func:`twitter.twitter_utils.calc_expected_status_length` was failing for 
python 2 due to a failure to convert a bytes string to unicode. Github issue 
#546.
+  * Documentation fix for :py:func:`twitter.api.Api.UsersLookup`. UsersLookup 
can take a string or a list and properly parses both of them now. Github issues 
#535 and #549.
+  * Properly decode response content for 
:py:func:`twitter.twitter_utils.http_to_file`. Github issue #521.
+  * Fix an issue with loading extended_tweet entities from Streaming API where 
tweets would be truncated when converting to a 
:py:class:`twitter.models.Status`. Github issues #491 and #506.
+
+-------------------------------------------------------------------

Old:
----
  python-twitter-3.4.tar.gz

New:
----
  python-twitter-3.4.2.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-python-twitter.spec ++++++
--- /var/tmp/diff_new_pack.omurHB/_old  2018-06-29 22:27:22.998439014 +0200
+++ /var/tmp/diff_new_pack.omurHB/_new  2018-06-29 22:27:22.998439014 +0200
@@ -19,7 +19,7 @@
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
 %define oldpython python
 Name:           python-python-twitter
-Version:        3.4
+Version:        3.4.2
 Release:        0
 Summary:        A Python wrapper around the Twitter API
 License:        Apache-2.0

++++++ python-twitter-3.4.tar.gz -> python-twitter-3.4.2.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/python-twitter-3.4/PKG-INFO 
new/python-twitter-3.4.2/PKG-INFO
--- old/python-twitter-3.4/PKG-INFO     2018-02-19 05:07:43.000000000 +0100
+++ new/python-twitter-3.4.2/PKG-INFO   2018-06-07 19:33:41.000000000 +0200
@@ -1,13 +1,14 @@
-Metadata-Version: 1.1
+Metadata-Version: 1.2
 Name: python-twitter
-Version: 3.4
+Version: 3.4.2
 Summary: A Python wrapper around the Twitter API
 Home-page: https://github.com/bear/python-twitter
 Author: The Python-Twitter Developers
 Author-email: [email protected]
+Maintainer: The Python-Twitter Developers
+Maintainer-email: [email protected]
 License: Apache License 2.0
 Download-URL: https://pypi.python.org/pypi/python-twitter
-Description-Content-Type: UNKNOWN
 Description: Python Twitter
         
         A Python wrapper around the Twitter API.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/python-twitter-3.4/python_twitter.egg-info/PKG-INFO 
new/python-twitter-3.4.2/python_twitter.egg-info/PKG-INFO
--- old/python-twitter-3.4/python_twitter.egg-info/PKG-INFO     2018-02-19 
05:07:43.000000000 +0100
+++ new/python-twitter-3.4.2/python_twitter.egg-info/PKG-INFO   2018-06-07 
19:33:41.000000000 +0200
@@ -1,13 +1,14 @@
-Metadata-Version: 1.1
+Metadata-Version: 1.2
 Name: python-twitter
-Version: 3.4
+Version: 3.4.2
 Summary: A Python wrapper around the Twitter API
 Home-page: https://github.com/bear/python-twitter
 Author: The Python-Twitter Developers
 Author-email: [email protected]
+Maintainer: The Python-Twitter Developers
+Maintainer-email: [email protected]
 License: Apache License 2.0
 Download-URL: https://pypi.python.org/pypi/python-twitter
-Description-Content-Type: UNKNOWN
 Description: Python Twitter
         
         A Python wrapper around the Twitter API.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/python-twitter-3.4/setup.cfg 
new/python-twitter-3.4.2/setup.cfg
--- old/python-twitter-3.4/setup.cfg    2018-02-19 05:07:43.000000000 +0100
+++ new/python-twitter-3.4.2/setup.cfg  2018-06-07 19:33:41.000000000 +0200
@@ -13,7 +13,7 @@
 [flake8]
 ignore = E111,E124,E126,E221,E501
 
-[pep8]
+[pycodestyle]
 ignore = E111,E124,E126,E221,E501
 max-line-length = 100
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/python-twitter-3.4/twitter/__init__.py 
new/python-twitter-3.4.2/twitter/__init__.py
--- old/python-twitter-3.4/twitter/__init__.py  2018-02-19 04:27:08.000000000 
+0100
+++ new/python-twitter-3.4.2/twitter/__init__.py        2018-06-07 
19:29:30.000000000 +0200
@@ -1,8 +1,7 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 #
-# vim: sw=2 ts=2 sts=2
-#
-# Copyright 2007 The Python-Twitter Developers
+# Copyright 2007-2018 The Python-Twitter Developers
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -16,14 +15,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""A library that provides a Python interface to the Twitter API"""
+"""A library that provides a Python interface to the Twitter API."""
 from __future__ import absolute_import
 
 __author__       = 'The Python-Twitter Developers'
 __email__        = '[email protected]'
 __copyright__    = 'Copyright (c) 2007-2016 The Python-Twitter Developers'
 __license__      = 'Apache License 2.0'
-__version__      = '3.4'
+__version__      = '3.4.2'
 __url__          = 'https://github.com/bear/python-twitter'
 __download_url__ = 'https://pypi.python.org/pypi/python-twitter'
 __description__  = 'A Python wrapper around the Twitter API'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/python-twitter-3.4/twitter/api.py 
new/python-twitter-3.4.2/twitter/api.py
--- old/python-twitter-3.4/twitter/api.py       2018-02-19 04:23:53.000000000 
+0100
+++ new/python-twitter-3.4.2/twitter/api.py     2018-06-07 19:15:14.000000000 
+0200
@@ -37,11 +37,9 @@
 try:
     # python 3
     from urllib.parse import urlparse, urlunparse, urlencode, quote_plus
-    from urllib.request import urlopen
     from urllib.request import __version__ as urllib_version
 except ImportError:
     from urlparse import urlparse, urlunparse
-    from urllib2 import urlopen
     from urllib import urlencode, quote_plus
     from urllib import __version__ as urllib_version
 
@@ -63,7 +61,8 @@
     calc_expected_status_length,
     is_url,
     parse_media_file,
-    enf_type)
+    enf_type,
+    parse_arg_list)
 
 from twitter.error import (
     TwitterError,
@@ -127,7 +126,7 @@
         >>> api.GetUserTimeline(user)
         >>> api.GetHomeTimeline()
         >>> api.GetStatus(status_id)
-        >>> def GetStatuses(status_ids)
+        >>> api.GetStatuses(status_ids)
         >>> api.DestroyStatus(status_id)
         >>> api.GetFriends(user)
         >>> api.GetFollowers()
@@ -1159,9 +1158,13 @@
             else:
                 _, _, file_size, media_type = parse_media_file(media)
                 if file_size > self.chunk_size or media_type in chunked_types:
-                    media_ids.append(self.UploadMediaChunked(media, 
media_additional_owners))
+                    media_ids.append(self.UploadMediaChunked(
+                        media, media_additional_owners, 
media_category=media_category
+                    ))
                 else:
-                    media_ids.append(self.UploadMediaSimple(media, 
media_additional_owners))
+                    media_ids.append(self.UploadMediaSimple(
+                        media, media_additional_owners, 
media_category=media_category
+                    ))
             parameters['media_ids'] = ','.join([str(mid) for mid in media_ids])
 
         if latitude is not None and longitude is not None:
@@ -1263,7 +1266,7 @@
         """
         url = '%s/media/upload.json' % self.upload_url
 
-        media_fp, filename, file_size, media_type = parse_media_file(media)
+        media_fp, filename, file_size, media_type = parse_media_file(media, 
async_upload=True)
 
         if not all([media_fp, filename, file_size, media_type]):
             raise TwitterError({'message': 'Could not process media file'})
@@ -1444,14 +1447,14 @@
 
         if len(words) == 1 and not is_url(words[0]):
             if len(words[0]) > CHARACTER_LIMIT:
-                raise TwitterError({"message": "Unable to split status into 
tweetable parts. Word was: {0}/{1}".format(len(words[0]), char_lim)})
+                raise TwitterError("Unable to split status into tweetable 
parts. Word was: {0}/{1}".format(len(words[0]), char_lim))
             else:
                 tweets.append(words[0])
                 return tweets
 
         for word in words:
             if len(word) > char_lim:
-                raise TwitterError({"message": "Unable to split status into 
tweetable parts. Word was: {0}/{1}".format(len(word), char_lim)})
+                raise TwitterError("Unable to split status into tweetable 
parts. Word was: {0}/{1}".format(len(word), char_lim))
             new_len = line_length
 
             if is_url(word):
@@ -1568,8 +1571,13 @@
         Returns:
           A sequence of twitter.Status instances, one for each message up to 
count
         """
-        return self.GetUserTimeline(since_id=since_id, count=count, 
max_id=max_id, trim_user=trim_user,
-                                    exclude_replies=True, include_rts=True)
+        return self.GetUserTimeline(
+            since_id=since_id,
+            count=count,
+            max_id=max_id,
+            trim_user=trim_user,
+            exclude_replies=True,
+            include_rts=True)
 
     def GetReplies(self,
                    since_id=None,
@@ -1713,25 +1721,20 @@
             When True, the user entities will be included. [Optional]
         """
         url = '%s/statuses/retweets_of_me.json' % self.base_url
-        parameters = {}
         if count is not None:
             try:
                 if int(count) > 100:
                     raise TwitterError({'message': "'count' may not be greater 
than 100"})
             except ValueError:
                 raise TwitterError({'message': "'count' must be an integer"})
-        if count:
-            parameters['count'] = count
-        if since_id:
-            parameters['since_id'] = since_id
-        if max_id:
-            parameters['max_id'] = max_id
-        if trim_user:
-            parameters['trim_user'] = trim_user
-        if not include_entities:
-            parameters['include_entities'] = include_entities
-        if not include_user_entities:
-            parameters['include_user_entities'] = include_user_entities
+        parameters = {
+            'count': count,
+            'since_id': since_id,
+            'max_id': max_id,
+            'trim_user': bool(trim_user),
+            'include_entities': bool(include_entities),
+            'include_user_entities': bool(include_user_entities),
+        }
 
         resp = self._RequestUrl(url, 'GET', data=parameters)
         data = self._ParseAndCheckTwitter(resp.content.decode('utf-8'))
@@ -2787,7 +2790,7 @@
         Args:
           user_id (int, list, optional):
             A list of user_ids to retrieve extended information.
-          screen_name (str, optional):
+          screen_name (str, list, optional):
             A list of screen_names to retrieve extended information.
           users (list, optional):
             A list of twitter.User objects to retrieve extended information.
@@ -2815,11 +2818,13 @@
         if len(uids):
             parameters['user_id'] = ','.join([str(u) for u in uids])
         if screen_name:
-            parameters['screen_name'] = ','.join(screen_name)
+            parameters['screen_name'] = parse_arg_list(screen_name, 
'screen_name')
 
         if len(uids) > 100:
             raise TwitterError("No more than 100 users may be requested per 
request.")
 
+        print(parameters)
+
         resp = self._RequestUrl(url, 'GET', data=parameters)
         data = self._ParseAndCheckTwitter(resp.content.decode('utf-8'))
 
@@ -2915,24 +2920,18 @@
           A sequence of twitter.DirectMessage instances
         """
         url = '%s/direct_messages.json' % self.base_url
-        parameters = {}
-        if since_id:
-            parameters['since_id'] = since_id
-        if max_id:
-            parameters['max_id'] = max_id
+        parameters = {
+            'full_text': bool(full_text),
+            'include_entities': bool(include_entities),
+            'max_id': max_id,
+            'since_id': since_id,
+            'skip_status': bool(skip_status),
+        }
+
         if count:
-            try:
-                parameters['count'] = int(count)
-            except ValueError:
-                raise TwitterError({'message': "count must be an integer"})
-        if not include_entities:
-            parameters['include_entities'] = 'false'
-        if skip_status:
-            parameters['skip_status'] = 1
-        if full_text:
-            parameters['full_text'] = 'true'
+            parameters['count'] = enf_type('count', int, count)
         if page:
-            parameters['page'] = page
+            parameters['page'] = enf_type('page', int, page)
 
         resp = self._RequestUrl(url, 'GET', data=parameters)
         data = self._ParseAndCheckTwitter(resp.content.decode('utf-8'))
@@ -2952,26 +2951,25 @@
         """Returns a list of the direct messages sent by the authenticating 
user.
 
         Args:
-          since_id:
+          since_id (int, optional):
             Returns results with an ID greater than (that is, more recent
             than) the specified ID. There are limits to the number of
             Tweets which can be accessed through the API. If the limit of
             Tweets has occured since the since_id, the since_id will be
-            forced to the oldest ID available. [Optional]
-          max_id:
+            forced to the oldest ID available.
+          max_id (int, optional):
             Returns results with an ID less than (that is, older than) or
-            equal to the specified ID. [Optional]
-          count:
+            equal to the specified ID.
+          count (int, optional):
             Specifies the number of direct messages to try and retrieve, up to 
a
             maximum of 200. The value of count is best thought of as a limit 
to the
             number of Tweets to return because suspended or deleted content is
-            removed after the count has been applied. [Optional]
-          page:
+            removed after the count has been applied.
+          page (int, optional):
             Specifies the page of results to retrieve.
             Note: there are pagination limits. [Optional]
-          include_entities:
+          include_entities (bool, optional):
             The entities node will be omitted when set to False.
-            [Optional]
           return_json (bool, optional):
             If True JSON data will be returned, instead of twitter.User
 
@@ -2979,20 +2977,17 @@
           A sequence of twitter.DirectMessage instances
         """
         url = '%s/direct_messages/sent.json' % self.base_url
-        parameters = {}
-        if since_id:
-            parameters['since_id'] = since_id
-        if page:
-            parameters['page'] = page
-        if max_id:
-            parameters['max_id'] = max_id
+
+        parameters = {
+            'include_entities': bool(include_entities),
+            'max_id': max_id,
+            'since_id': since_id,
+        }
+
         if count:
-            try:
-                parameters['count'] = int(count)
-            except ValueError:
-                raise TwitterError({'message': "count must be an integer"})
-        if not include_entities:
-            parameters['include_entities'] = 'false'
+            parameters['count'] = enf_type('count', int, count)
+        if page:
+            parameters['page'] = enf_type('page', int, page)
 
         resp = self._RequestUrl(url, 'GET', data=parameters)
         data = self._ParseAndCheckTwitter(resp.content.decode('utf-8'))
@@ -3241,7 +3236,7 @@
         parameters = {}
 
         if user_id:
-            if isinstance(user_id, list) or isinstance(user_id, tuple):
+            if isinstance(user_id, (list, tuple)):
                 uids = list()
                 for user in user_id:
                     if isinstance(user, User):
@@ -3255,7 +3250,7 @@
                 else:
                     parameters['user_id'] = enf_type('user_id', int, user_id)
         if screen_name:
-            if isinstance(screen_name, list) or isinstance(screen_name, tuple):
+            if isinstance(screen_name, (list, tuple)):
                 sn_list = list()
                 for user in screen_name:
                     if isinstance(user, User):
@@ -3269,8 +3264,7 @@
                 else:
                     parameters['screen_name'] = enf_type('screen_name', str, 
screen_name)
         if not user_id and not screen_name:
-            raise TwitterError(
-                "Specify at least one of user_id or screen_name.")
+            raise TwitterError("Specify at least one of user_id or 
screen_name.")
 
         resp = self._RequestUrl(url, 'GET', data=parameters)
         data = self._ParseAndCheckTwitter(resp.content.decode('utf-8'))
@@ -3543,20 +3537,17 @@
           A sequence of twitter.Status instances, one for each mention of the 
user.
         """
         url = '%s/statuses/mentions_timeline.json' % self.base_url
-        parameters = {}
+
+        parameters = {
+            'contributor_details': bool(contributor_details),
+            'include_entities': bool(include_entities),
+            'max_id': max_id,
+            'since_id': since_id,
+            'trim_user': bool(trim_user),
+        }
 
         if count:
             parameters['count'] = enf_type('count', int, count)
-        if since_id:
-            parameters['since_id'] = enf_type('since_id', int, since_id)
-        if max_id:
-            parameters['max_id'] = enf_type('max_id', int, max_id)
-        if trim_user:
-            parameters['trim_user'] = 1
-        if contributor_details:
-            parameters['contributor_details'] = 'true'
-        if not include_entities:
-            parameters['include_entities'] = 'false'
 
         resp = self._RequestUrl(url, 'GET', data=parameters)
         data = self._ParseAndCheckTwitter(resp.content.decode('utf-8'))
@@ -4386,49 +4377,37 @@
         """Update's the authenticated user's profile data.
 
         Args:
-          name:
+          name (str, optional):
             Full name associated with the profile.
-            Maximum of 20 characters. [Optional]
-          profileURL:
+          profileURL (str, optional):
             URL associated with the profile.
             Will be prepended with "http://"; if not present.
-            Maximum of 100 characters. [Optional]
-          location:
+          location (str, optional):
             The city or country describing where the user of the account is 
located.
             The contents are not normalized or geocoded in any way.
-            Maximum of 30 characters. [Optional]
-          description:
+          description (str, optional):
             A description of the user owning the account.
-            Maximum of 160 characters. [Optional]
-          profile_link_color:
+          profile_link_color (str, optional):
             hex value of profile color theme. formated without '#' or '0x'. 
Ex:  FF00FF
-            [Optional]
-          include_entities:
+          include_entities (bool, optional):
             The entities node will be omitted when set to False.
-            [Optional]
-          skip_status:
+          skip_status (bool, optional):
             When set to either True, t or 1 then statuses will not be included
-            in the returned user objects. [Optional]
+            in the returned user objects.
 
         Returns:
           A twitter.User instance representing the modified user.
         """
         url = '%s/account/update_profile.json' % (self.base_url)
-        data = {}
-        if name:
-            data['name'] = name
-        if profileURL:
-            data['url'] = profileURL
-        if location:
-            data['location'] = location
-        if description:
-            data['description'] = description
-        if profile_link_color:
-            data['profile_link_color'] = profile_link_color
-        if include_entities:
-            data['include_entities'] = include_entities
-        if skip_status:
-            data['skip_status'] = skip_status
+        data = {
+            'name': name,
+            'url': profileURL,
+            'location': location,
+            'description': description,
+            'profile_link_color': profile_link_color,
+            'include_entities': include_entities,
+            'skip_status': skip_status,
+        }
 
         resp = self._RequestUrl(url, 'POST', data=data)
         data = self._ParseAndCheckTwitter(resp.content.decode('utf-8'))
@@ -4823,10 +4802,10 @@
         # Add any additional path elements to the path
         if path_elements:
             # Filter out the path elements that have a value of None
-            p = [i for i in path_elements if i]
+            filtered_elements = [i for i in path_elements if i]
             if not path.endswith('/'):
                 path += '/'
-            path += '/'.join(p)
+            path += '/'.join(filtered_elements)
 
         # Add any additional query parameters to the query string
         if extra_params and len(extra_params) > 0:
@@ -4881,7 +4860,13 @@
         if not isinstance(parameters, dict):
             raise TwitterError("`parameters` must be a dict.")
         else:
-            return urlencode(dict((k, v) for k, v in parameters.items() if v 
is not None))
+            params = dict()
+            for k, v in parameters.items():
+                if v is not None:
+                    if getattr(v, 'encode', None):
+                        v = v.encode('utf8')
+                    params.update({k: v})
+            return urlencode(params)
 
     def _ParseAndCheckTwitter(self, json_data):
         """Try and parse the JSON returned from Twitter and return
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/python-twitter-3.4/twitter/debug.py 
new/python-twitter-3.4.2/twitter/debug.py
--- old/python-twitter-3.4/twitter/debug.py     2017-04-21 00:57:54.000000000 
+0200
+++ new/python-twitter-3.4.2/twitter/debug.py   2018-02-24 19:25:24.000000000 
+0100
@@ -18,8 +18,8 @@
         Returns:
             data
         """
-
         url = "{0}{1}".format(self.base_url, endpoint)
+        print(url)
 
         if verb == 'POST':
             if 'media_ids' in data:
@@ -36,7 +36,7 @@
                     raw_data = requests.post(
                         url,
                         files=data,
-                        auth=self.__auth,
+                        auth=self._Api__auth,
                         timeout=self._timeout
                     )
                 except requests.RequestException as e:
@@ -47,7 +47,7 @@
                     raw_data = requests.post(
                         url,
                         data=data,
-                        auth=self.__auth,
+                        auth=self._Api__auth,
                         timeout=self._timeout
                     )
                 except requests.RequestException as e:
@@ -58,7 +58,7 @@
             try:
                 raw_data = requests.get(
                     url,
-                    auth=self.__auth,
+                    auth=self._Api__auth,
                     timeout=self._timeout)
 
             except requests.RequestException as e:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/python-twitter-3.4/twitter/error.py 
new/python-twitter-3.4.2/twitter/error.py
--- old/python-twitter-3.4/twitter/error.py     2018-02-19 04:26:35.000000000 
+0100
+++ new/python-twitter-3.4.2/twitter/error.py   2018-06-07 18:26:47.000000000 
+0200
@@ -18,3 +18,8 @@
 class PythonTwitterDeprecationWarning330(PythonTwitterDeprecationWarning):
     """Warning for features to be removed in version 3.3.0"""
     pass
+
+
+class PythonTwitterDeprecationWarning340(PythonTwitterDeprecationWarning):
+    """Warning for features to be removed in version 3.4.0"""
+    pass
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/python-twitter-3.4/twitter/models.py 
new/python-twitter-3.4.2/twitter/models.py
--- old/python-twitter-3.4/twitter/models.py    2018-02-17 14:33:43.000000000 
+0100
+++ new/python-twitter-3.4.2/twitter/models.py  2018-06-07 18:26:47.000000000 
+0200
@@ -499,6 +499,11 @@
         user = None
         user_mentions = None
 
+        # for loading extended tweets from the streaming API.
+        if 'extended_tweet' in data:
+            for k, v in data['extended_tweet'].items():
+                data[k] = v
+
         if 'user' in data:
             user = User.NewFromJsonDict(data['user'])
         if 'retweeted_status' in data:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/python-twitter-3.4/twitter/parse_tweet.py 
new/python-twitter-3.4.2/twitter/parse_tweet.py
--- old/python-twitter-3.4/twitter/parse_tweet.py       2017-11-26 
20:39:44.000000000 +0100
+++ new/python-twitter-3.4.2/twitter/parse_tweet.py     2018-06-07 
18:26:47.000000000 +0200
@@ -96,5 +96,5 @@
 
     @staticmethod
     def getURLs(tweet):
-        """ URL : [http://]?[\w\.?/]+""";
+        r""" URL : [http://]?[\w\.?/]+""";
         return re.findall(ParseTweet.regexp["URL"], tweet)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/python-twitter-3.4/twitter/twitter_utils.py 
new/python-twitter-3.4.2/twitter/twitter_utils.py
--- old/python-twitter-3.4/twitter/twitter_utils.py     2018-02-17 
17:49:40.000000000 +0100
+++ new/python-twitter-3.4.2/twitter/twitter_utils.py   2018-06-07 
19:15:14.000000000 +0200
@@ -15,10 +15,14 @@
 
 import requests
 from twitter import TwitterError
+import twitter
 
 if sys.version_info < (3,):
     range = xrange
 
+if sys.version_info > (3,):
+    unicode = str
+
 CHAR_RANGES = [
     range(0, 4351),
     range(8192, 8205),
@@ -177,6 +181,8 @@
 
     """
     status_length = 0
+    if isinstance(status, bytes):
+        status = unicode(status)
     for word in re.split(r'\s', status):
         if is_url(word):
             status_length += short_url_length
@@ -205,16 +211,18 @@
 def http_to_file(http):
     data_file = NamedTemporaryFile()
     req = requests.get(http, stream=True)
-    data_file.write(req.raw.data)
+    for chunk in req.iter_content(chunk_size=1024 * 1024):
+        data_file.write(chunk)
     return data_file
 
 
-def parse_media_file(passed_media):
+def parse_media_file(passed_media, async_upload=False):
     """ Parses a media file and attempts to return a file-like object and
     information about the media file.
 
     Args:
         passed_media: media file which to parse.
+        async_upload: flag, for validation media file attributes.
 
     Returns:
         file-like object, the filename of the media file, the file size, and
@@ -222,9 +230,11 @@
     """
     img_formats = ['image/jpeg',
                    'image/png',
-                   'image/gif',
                    'image/bmp',
                    'image/webp']
+    long_img_formats = [
+        'image/gif'
+    ]
     video_formats = ['video/mp4',
                      'video/quicktime']
 
@@ -259,9 +269,13 @@
     if media_type is not None:
         if media_type in img_formats and file_size > 5 * 1048576:
             raise TwitterError({'message': 'Images must be less than 5MB.'})
-        elif media_type in video_formats and file_size > 15 * 1048576:
+        elif media_type in long_img_formats and file_size > 15 * 1048576:
+            raise TwitterError({'message': 'GIF Image must be less than 
15MB.'})
+        elif media_type in video_formats and not async_upload and file_size > 
15 * 1048576:
             raise TwitterError({'message': 'Videos must be less than 15MB.'})
-        elif media_type not in img_formats and media_type not in video_formats:
+        elif media_type in video_formats and async_upload and file_size > 512 
* 1048576:
+            raise TwitterError({'message': 'Videos must be less than 512MB.'})
+        elif media_type not in img_formats and media_type not in video_formats 
and media_type not in long_img_formats:
             raise TwitterError({'message': 'Media type could not be 
determined.'})
 
     return data_file, filename, file_size, media_type
@@ -290,3 +304,18 @@
         raise TwitterError({
             'message': '"{0}" must be type {1}'.format(field, _type.__name__)
         })
+
+
+def parse_arg_list(args, attr):
+    out = []
+    if isinstance(args, (str, unicode)):
+        out.append(args)
+    elif isinstance(args, twitter.User):
+        out.append(getattr(args, attr))
+    elif isinstance(args, (list, tuple)):
+        for item in args:
+            if isinstance(item, (str, unicode)):
+                out.append(item)
+            elif isinstance(item, twitter.User):
+                out.append(getattr(item, attr))
+    return ",".join([str(item) for item in out])


Reply via email to