Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-fanficfare for 
openSUSE:Factory checked in at 2021-06-09 21:52:30
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-fanficfare (Old)
 and      /work/SRC/openSUSE:Factory/.python-fanficfare.new.32437 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-fanficfare"

Wed Jun  9 21:52:30 2021 rev:33 rq:898340 version:4.3.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-fanficfare/python-fanficfare.changes      2021-05-05 20:40:09.346876388 +0200
+++ /work/SRC/openSUSE:Factory/.python-fanficfare.new.32437/python-fanficfare.changes   2021-06-09 21:52:49.778531160 +0200
@@ -1,0 +2,13 @@
+Tue Jun  1 09:20:32 UTC 2021 - Matej Cepl <[email protected]>
+
+- Update to 4.3.0:
+  - quotev.com: use_cloudscraper:true by default.
+  - Update translations
+  - adapter_bdsmlibrarycom: Set author Anonymous when author not found
+    instead of sys.exit(). Closes #696
+  - Fix XF authorUrl and author_avatar_cover feature. Closes #695
+  - Catch exceptions in word count.
+  - Add CLI --color option for warns and fails. Closes #692
+  - Report browser cache load fail as such.
+
+-------------------------------------------------------------------

Old:
----
  FanFicFare-4.2.0.tar.gz

New:
----
  FanFicFare-4.3.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-fanficfare.spec ++++++
--- /var/tmp/diff_new_pack.MI2SDM/_old  2021-06-09 21:52:50.430532323 +0200
+++ /var/tmp/diff_new_pack.MI2SDM/_new  2021-06-09 21:52:50.430532323 +0200
@@ -21,7 +21,7 @@
 %define skip_python2 1
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
 Name:           python-fanficfare
-Version:        4.2.0
+Version:        4.3.0
 Release:        0
 Summary:        Tool for making eBooks from stories on fanfiction and other web sites
 License:        GPL-3.0-only

++++++ FanFicFare-4.2.0.tar.gz -> FanFicFare-4.3.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/FanFicFare-4.2.0/calibre-plugin/__init__.py new/FanFicFare-4.3.0/calibre-plugin/__init__.py
--- old/FanFicFare-4.2.0/calibre-plugin/__init__.py     2021-04-30 15:42:27.000000000 +0200
+++ new/FanFicFare-4.3.0/calibre-plugin/__init__.py     2021-05-30 20:09:12.000000000 +0200
@@ -33,7 +33,7 @@
 from calibre.customize import InterfaceActionBase
 
 # pulled out from FanFicFareBase for saving in prefs.py
-__version__ = (4, 2, 0)
+__version__ = (4, 3, 0)
 
 ## Apparently the name for this class doesn't matter--it was still
 ## 'demo' for the first few versions.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/FanFicFare-4.2.0/calibre-plugin/jobs.py new/FanFicFare-4.3.0/calibre-plugin/jobs.py
--- old/FanFicFare-4.2.0/calibre-plugin/jobs.py 2021-04-30 15:42:27.000000000 +0200
+++ new/FanFicFare-4.3.0/calibre-plugin/jobs.py 2021-05-30 20:09:12.000000000 +0200
@@ -395,13 +395,16 @@
 
             if options['do_wordcount'] == SAVE_YES or (
                 options['do_wordcount'] == SAVE_YES_UNLESS_SITE and not story.getMetadataRaw('numWords') ):
-                wordcount = get_word_count(outfile)
-                # logger.info("get_word_count:%s"%wordcount)
-                story.setMetadata('numWords',wordcount)
-                writer.writeStory(outfilename=outfile, forceOverwrite=True)
-                book['all_metadata'] = story.getAllMetadata(removeallentities=True)
-                if options['savemetacol'] != '':
-                    book['savemetacol'] = story.dump_html_metadata()
+                try:
+                    wordcount = get_word_count(outfile)
+                   # logger.info("get_word_count:%s"%wordcount)
+                    story.setMetadata('numWords',wordcount)
+                    writer.writeStory(outfilename=outfile, forceOverwrite=True)
+                    book['all_metadata'] = story.getAllMetadata(removeallentities=True)
+                    if options['savemetacol'] != '':
+                        book['savemetacol'] = story.dump_html_metadata()
+                except:
+                    logger.error("WordCount failed")
 
             if options['smarten_punctuation'] and options['fileform'] == "epub" \
                     and calibre_version >= (0, 9, 39):
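
The hunk above implements the "Catch exceptions in word count" changelog entry: the word-count and metadata rewrite step is wrapped in try/except so a failure there is logged and the rest of the calibre job continues. A minimal sketch of the same best-effort pattern, using hypothetical helper names (count_words, update_word_count) rather than FanFicFare's actual API:

    import logging

    logger = logging.getLogger(__name__)

    def count_words(path):
        # hypothetical stand-in for FanFicFare's get_word_count()
        with open(path, encoding="utf-8", errors="ignore") as f:
            return len(f.read().split())

    def update_word_count(path, metadata):
        # post-processing is best-effort: log the failure and carry on,
        # so one unreadable file does not abort the surrounding job
        try:
            metadata["numWords"] = count_words(path)
        except Exception:
            logger.exception("WordCount failed")
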
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/FanFicFare-4.2.0/calibre-plugin/plugin-defaults.ini new/FanFicFare-4.3.0/calibre-plugin/plugin-defaults.ini
--- old/FanFicFare-4.2.0/calibre-plugin/plugin-defaults.ini     2021-04-30 15:42:27.000000000 +0200
+++ new/FanFicFare-4.3.0/calibre-plugin/plugin-defaults.ini     2021-05-30 20:09:12.000000000 +0200
@@ -2226,7 +2226,7 @@
 
 [quotev.com]
 use_basic_cache:true
-user_agent:Mozilla/5.0
+use_cloudscraper:true
 slow_down_sleep_time:2
 extra_valid_entries:pages,readers,reads,favorites,searchtags,comments
 pages_label:Pages
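
In both plugin-defaults.ini and defaults.ini (the same change appears further down), the [quotev.com] section now sets use_cloudscraper:true instead of pinning a Mozilla/5.0 user agent, so quotev.com fetches go through the cloudscraper library, which handles Cloudflare's browser checks. Roughly, cloudscraper behaves like a requests session; an illustrative sketch (the URL is only an example, not FanFicFare's actual fetch path):

    import cloudscraper

    # create_scraper() returns a requests-compatible session that solves
    # Cloudflare's JavaScript challenge before handing back the response
    scraper = cloudscraper.create_scraper()
    html = scraper.get("https://www.quotev.com/").text
    print(len(html))
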
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/FanFicFare-4.2.0/calibre-plugin/translations/fr.po new/FanFicFare-4.3.0/calibre-plugin/translations/fr.po
--- old/FanFicFare-4.2.0/calibre-plugin/translations/fr.po      2021-04-30 15:42:27.000000000 +0200
+++ new/FanFicFare-4.3.0/calibre-plugin/translations/fr.po      2021-05-30 20:09:12.000000000 +0200
@@ -17,8 +17,8 @@
 msgstr ""
 "Project-Id-Version: calibre-plugins\n"
 "POT-Creation-Date: 2021-02-12 13:32-0600\n"
-"PO-Revision-Date: 2021-02-12 22:21+0000\n"
-"Last-Translator: Transifex Bot <>\n"
+"PO-Revision-Date: 2021-05-18 10:39+0000\n"
+"Last-Translator: Ptit Prince <[email protected]>\n"
 "Language-Team: French 
(http://www.transifex.com/calibre/calibre-plugins/language/fr/)\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
@@ -2381,7 +2381,7 @@
 
 #: jobs.py:73
 msgid "Launch background process for site %s:"
-msgstr ""
+msgstr "Lancer le processus d'arri??re-plan pour le site %s :"
 
 #: jobs.py:90
 msgid "Downloading FanFiction Stories"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/FanFicFare-4.2.0/fanficfare/adapters/adapter_bdsmlibrarycom.py new/FanFicFare-4.3.0/fanficfare/adapters/adapter_bdsmlibrarycom.py
--- old/FanFicFare-4.2.0/fanficfare/adapters/adapter_bdsmlibrarycom.py  2021-04-30 15:42:27.000000000 +0200
+++ new/FanFicFare-4.3.0/fanficfare/adapters/adapter_bdsmlibrarycom.py  2021-05-30 20:09:12.000000000 +0200
@@ -112,24 +112,17 @@
 
         # Author
         author = soup.find('a', href=re.compile(r"/stories/author.php\?authorid=\d+"))
-        i = 0
-        while author == None:
-            time.sleep(1)
-            logger.warning('A problem retrieving the author information. Trying Again')
-            data = self.get_request(self.url)
-            soup = self.make_soup(data)
-            author = soup.find('a', href=re.compile(r"/stories/author.php\?authorid=\d+"))
-            i += 1
-            if i == 20:
-                logger.info('Too Many cycles... exiting')
-                sys.exit()
-
-
-        authorurl = urlparse.urljoin(self.url, author['href'])
-        self.story.setMetadata('author', author.text)
-        self.story.setMetadata('authorUrl', authorurl)
-        authorid = author['href'].split('=')[1]
-        self.story.setMetadata('authorId', authorid)
+        if author:
+            authorurl = urlparse.urljoin(self.url, author['href'])
+            self.story.setMetadata('author', author.text)
+            self.story.setMetadata('authorUrl', authorurl)
+            authorid = author['href'].split('=')[1]
+            self.story.setMetadata('authorId', authorid)
+        else:
+            logger.info("Failed to find Author, setting to Anonymous")
+            self.story.setMetadata('author','Anonymous')
+            self.story.setMetadata('authorUrl','https://' + self.getSiteDomain() + '/')
+            self.story.setMetadata('authorId','0')
 
         # Find the chapters:
         # The update date is with the chapter links... so we will update it here as well
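
The old retry loop that could end in sys.exit() is gone: when the author link is missing, the adapter now records "Anonymous" and a site-root authorUrl instead of terminating the process (issue #696). The branch relies on BeautifulSoup's find() returning None when nothing matches; a self-contained sketch with made-up markup:

    import re
    from bs4 import BeautifulSoup

    html = "<html><body><p>no author link on this page</p></body></html>"  # example markup
    soup = BeautifulSoup(html, "html.parser")

    # find() yields None on no match, so a simple truthiness test selects
    # the Anonymous fallback instead of exiting the whole program
    author = soup.find("a", href=re.compile(r"/stories/author\.php\?authorid=\d+"))
    if author:
        print("author:", author.text)
    else:
        print("author: Anonymous")
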
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/FanFicFare-4.2.0/fanficfare/adapters/base_xenforo2forum_adapter.py new/FanFicFare-4.3.0/fanficfare/adapters/base_xenforo2forum_adapter.py
--- old/FanFicFare-4.2.0/fanficfare/adapters/base_xenforo2forum_adapter.py      2021-04-30 15:42:27.000000000 +0200
+++ new/FanFicFare-4.3.0/fanficfare/adapters/base_xenforo2forum_adapter.py      2021-05-30 20:09:12.000000000 +0200
@@ -142,12 +142,13 @@
     def parse_author(self,souptag):
         user = souptag.find('section',{'class':'message-user'})
         a = user.find('a',{'class':'username'})
+        authorUrl = None
         if a:
             # logger.debug(a)
             self.story.addToList('authorId',a['href'].split('/')[-2])
             authorUrl = a['href']
             if not authorUrl.startswith('http'):
-                authorUrl = self.getURLPrefix()+authorUrl
+                authorUrl = self.getURLDomain()+authorUrl
             self.story.addToList('authorUrl',authorUrl)
             self.story.addToList('author',a.text)
         else:
@@ -157,6 +158,14 @@
             self.story.setMetadata('authorUrl',self.getURLPrefix())
             self.story.setMetadata('authorId','0')
 
+        # logger.debug("author_avatar_cover:%s"%self.getConfig('author_avatar_cover'))
+        if self.getConfig('author_avatar_cover') and authorUrl:
+            authorcard = self.make_soup(self.get_request(authorUrl))
+            # logger.debug(authorcard)
+            covera = authorcard.find('span',{'class':'avatarWrapper'}).find('a')
+            if covera:
+                self.setCoverImage(self.url,self.getURLDomain()+covera['href'])
+
     def cache_posts(self,topsoup):
         for post in topsoup.find_all('article',{'class':'message--post'}):
             # logger.debug("Caching %s"%post['data-content'])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/FanFicFare-4.2.0/fanficfare/adapters/base_xenforoforum_adapter.py new/FanFicFare-4.3.0/fanficfare/adapters/base_xenforoforum_adapter.py
--- old/FanFicFare-4.2.0/fanficfare/adapters/base_xenforoforum_adapter.py       2021-04-30 15:42:27.000000000 +0200
+++ new/FanFicFare-4.3.0/fanficfare/adapters/base_xenforoforum_adapter.py       2021-05-30 20:09:12.000000000 +0200
@@ -76,9 +76,12 @@
         return '/'
 
     @classmethod
+    def getURLDomain(cls):
+        return 'https://' + cls.getSiteDomain()
+
+    @classmethod
     def getURLPrefix(cls):
-        # The site domain.  Does have www here, if it uses it.
-        return 'https://' + cls.getSiteDomain() + cls.getPathPrefix()
+        return cls.getURLDomain() + cls.getPathPrefix()
 
     @classmethod
     def getSiteExampleURLs(cls):
@@ -524,18 +527,6 @@
         # author moved down here to take from post URLs.
         self.parse_author(souptag)
 
-        if self.getConfig('author_avatar_cover'):
-            authorcard = self.make_soup(self.get_request(authorUrl+"?card=1"))
-            coverurl = '/'+authorcard.find('div',{'class':'avatarCropper'}).find('img')['src']
-            self.setCoverImage(self.url,coverurl)
-            ## https://forums.spacebattles.com/members/mp3-1415player.322925/?card=1
-            ## <div class="avatarCropper">
-            ##        <a class="avatar NoOverlay Av322925l" 
href="members/mp3-1415player.322925/">
-            ##                <img 
src="data/avatars/l/322/322925.jpg?1471421076" alt="" style="left: 0px; top: 
-92px; " />
-            ##        </a>
-            ##
-            ## </div>
-
         # Now get first post for description and chapter list if not
         # using threadmarks.
         index_post = self.get_post_body(souptag)
@@ -594,6 +585,13 @@
         authorUrl = self.getURLPrefix()+a['href']
         self.story.addToList('authorUrl',authorUrl)
         self.story.addToList('author',a.text)
+        # logger.debug("author_avatar_cover:%s"%self.getConfig('author_avatar_cover'))
+        if self.getConfig('author_avatar_cover'):
+            authorcard = self.make_soup(self.get_request(authorUrl))
+            # logger.debug(authorcard)
+            coverimg = authorcard.find('div',{'class':'avatarScaler'}).find('img')
+            if coverimg:
+                self.setCoverImage(self.url,coverimg['src'])
 
     def get_first_post(self,topsoup):
         return topsoup.find('li',{'class':'message'}) # limit first post for date stuff below. ('#' posts above)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/FanFicFare-4.2.0/fanficfare/browsercache/basebrowsercache.py new/FanFicFare-4.3.0/fanficfare/browsercache/basebrowsercache.py
--- old/FanFicFare-4.2.0/fanficfare/browsercache/basebrowsercache.py    2021-04-30 15:42:27.000000000 +0200
+++ new/FanFicFare-4.3.0/fanficfare/browsercache/basebrowsercache.py    2021-05-30 20:09:12.000000000 +0200
@@ -1,6 +1,7 @@
 import sys
 import os
 import time
+import traceback
 
 import gzip
 import zlib
@@ -170,11 +171,15 @@
 
     def get_key_mapping(self,url):
         # logger.debug("get_key_mapping:%s"%url)
-        ## on demamand map loading now.
+        ## on demand map loading now.
         ## browser_cache is shared between configurations
         ## XXX Needs some locking if multi-threading implemented.
         if not self.mapping_loaded:
-            self.do_map_cache_keys()
+            try:
+                self.do_map_cache_keys()
+            except Exception as e:
+                logger.debug(traceback.format_exc())
+                raise BrowserCacheException("Browser Cache Failed to Load with error '%s'"%e)
         return self.key_mapping.get(self.minimal_url(url),(None,None))[0]
 
     def get_data(self, url):
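
The key mapping is still loaded on demand, but a failure inside do_map_cache_keys() is now logged with its full traceback and re-raised as a BrowserCacheException carrying the original message, so a broken browser cache is reported as such rather than surfacing as an unrelated traceback. The generic wrap-and-reraise pattern, sketched with a hypothetical exception class and loader:

    import logging
    import traceback

    logger = logging.getLogger(__name__)

    class CacheLoadError(Exception):
        pass  # hypothetical stand-in for BrowserCacheException

    def load_cache_map(loader):
        try:
            return loader()
        except Exception as e:
            # keep the full traceback in the debug log, but raise a
            # domain-specific error with the original message attached
            logger.debug(traceback.format_exc())
            raise CacheLoadError("Browser cache failed to load with error '%s'" % e)
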
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/FanFicFare-4.2.0/fanficfare/cli.py new/FanFicFare-4.3.0/fanficfare/cli.py
--- old/FanFicFare-4.2.0/fanficfare/cli.py      2021-04-30 15:42:27.000000000 +0200
+++ new/FanFicFare-4.3.0/fanficfare/cli.py      2021-05-30 20:09:12.000000000 +0200
@@ -25,9 +25,10 @@
 import logging
 import pprint
 import string
-import os, sys
+import os, sys, platform
 
-version="4.2.0"
+
+version="4.3.0"
 os.environ['CURRENT_VERSION_ID']=version
 
 global_cache = 'global_cache'
@@ -154,6 +155,9 @@
     parser.add_option('-p', '--progressbar',
                       action='store_true', dest='progressbar',
                       help='Display a simple progress bar while downloading--one dot(.) per network fetch.', )
+    parser.add_option('--color',
+                      action='store_true', dest='color',
+                      help='Display a errors and warnings in a contrasting color.  Requires package colorama on Windows.', )
     parser.add_option('-v', '--version',
                       action='store_true', dest='version',
                       help='Display version and quit.', )
@@ -179,7 +183,6 @@
     if not options.debug:
         logger.setLevel(logging.WARNING)
     else:
-        import platform
         logger.debug("    OS Version:%s"%platform.platform())
         logger.debug("Python Version:%s"%sys.version)
         logger.debug("   FFF Version:%s"%version)
@@ -188,6 +191,26 @@
         print("Version: %s" % version)
         return
 
+    if options.color:
+        if 'Windows' in platform.platform():
+            try:
+                from colorama import init as colorama_init
+                colorama_init()
+            except ImportError:
+                print("Option --color will not work on Windows without 
installing Python package colorama.\nContinue? (y/n)?")
+                if options.interactive:
+                    if not sys.stdin.readline().strip().lower().startswith('y'):
+                        return
+                    else:
+                        # for non-interactive, default the response to yes and continue processing
+                        print('y')
+        def warn(t):
+            print("\033[{}m{}\033[0m".format(34, t)) # blue
+        def fail(t):
+            print("\033[{}m{}\033[0m".format(31, t)) # red
+    else:
+        warn = fail = print
+
     list_only = any((options.imaplist,
                      options.siteslist,
                      options.list,
@@ -287,17 +310,21 @@
                     do_download(url,
                                 options,
                                 passed_defaultsini,
-                                passed_personalini)
+                                passed_personalini,
+                                warn,
+                                fail)
                 except Exception as e:
                     if len(urls) == 1:
                         raise
-                    print("URL(%s) Failed: Exception (%s). Run URL 
individually for more detail."%(url,e))
+                    fail("URL(%s) Failed: Exception (%s). Run URL individually 
for more detail."%(url,e))
 
 # make rest a function and loop on it.
 def do_download(arg,
                 options,
                 passed_defaultsini,
-                passed_personalini):
+                passed_personalini,
+                warn=print,
+                fail=print):
 
     # Attempt to update an existing epub.
     chaptercount = None
@@ -312,7 +339,7 @@
         try:
             url, chaptercount = get_dcsource_chaptercount(arg)
             if not url:
-                print('No story URL found in epub to update.')
+                fail('No story URL found in epub to update.')
                 return
             print('Updating %s, URL: %s' % (arg, url))
             output_filename = arg
@@ -353,7 +380,7 @@
                 noturl, chaptercount = get_dcsource_chaptercount(output_filename)
                 print('Updating %s, URL: %s' % (output_filename, url))
             except Exception as e:
-                print("Failed to read epub for update: (%s) Continuing with 
update=false"%e)
+                warn("Failed to read epub for update: (%s) Continuing with 
update=false"%e)
                 update_story = False
 
         # Check for include_images without no_image_processing. In absence of PIL, give warning.
@@ -410,9 +437,9 @@
             if chaptercount == urlchaptercount and not options.metaonly and not options.updatealways:
                 print('%s already contains %d chapters.' % (output_filename, chaptercount))
             elif chaptercount > urlchaptercount:
-                print('%s contains %d chapters, more than source: %d.' % (output_filename, chaptercount, urlchaptercount))
+                warn('%s contains %d chapters, more than source: %d.' % (output_filename, chaptercount, urlchaptercount))
             elif chaptercount == 0:
-                print("%s doesn't contain any recognizable chapters, probably 
from a different source.  Not updating." % output_filename)
+                warn("%s doesn't contain any recognizable chapters, probably 
from a different source.  Not updating." % output_filename)
             else:
                 # update now handled by pre-populating the old
                 # images and chapters in the adapter rather than
@@ -484,18 +511,18 @@
             print(json.dumps(metadata, sort_keys=True,
                              indent=2, separators=(',', ':')))
         if adapter.story.chapter_error_count > 0:
-            print("===================\n!!!! %s chapters errored downloading 
%s !!!!\n==================="%(adapter.story.chapter_error_count,
+            warn("===================\n!!!! %s chapters errored downloading %s 
!!!!\n==================="%(adapter.story.chapter_error_count,
                                                         url))
         del adapter
 
     except exceptions.InvalidStoryURL as isu:
-        print(isu)
+        fail(isu)
     except exceptions.StoryDoesNotExist as dne:
-        print(dne)
+        fail(dne)
     except exceptions.UnknownSite as us:
-        print(us)
+        fail(us)
     except exceptions.AccessDenied as ad:
-        print(ad)
+        fail(ad)
 
 def get_configuration(url,
                       passed_defaultsini,
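
The new --color option prints warnings in blue and failures in red via ANSI escape sequences, and do_download() now receives warn/fail callables so the existing print() calls can be redirected to them; on Windows the colorama package is needed to make the console honour those sequences. A minimal, self-contained sketch of the same mechanism (make_printers is a hypothetical name; only the 34/31 colour codes and the colorama usage mirror the diff):

    import platform
    import sys

    def make_printers(color=True):
        # ANSI SGR codes: 34 = blue foreground, 31 = red, 0 = reset
        if color and "Windows" in platform.platform():
            try:
                from colorama import init as colorama_init
                colorama_init()  # let the Windows console interpret ANSI codes
            except ImportError:
                print("colorama not installed; colors may not render", file=sys.stderr)
        if color:
            def warn(t):
                print("\033[34m%s\033[0m" % t)  # blue
            def fail(t):
                print("\033[31m%s\033[0m" % t)  # red
        else:
            warn = fail = print
        return warn, fail

    warn, fail = make_printers()
    warn("this would be blue")
    fail("this would be red")
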
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/FanFicFare-4.2.0/fanficfare/defaults.ini new/FanFicFare-4.3.0/fanficfare/defaults.ini
--- old/FanFicFare-4.2.0/fanficfare/defaults.ini        2021-04-30 15:42:27.000000000 +0200
+++ new/FanFicFare-4.3.0/fanficfare/defaults.ini        2021-05-30 20:09:12.000000000 +0200
@@ -2248,7 +2248,7 @@
 
 [quotev.com]
 use_basic_cache:true
-user_agent:Mozilla/5.0
+use_cloudscraper:true
 slow_down_sleep_time:2
 extra_valid_entries:pages,readers,reads,favorites,searchtags,comments
 pages_label:Pages
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/FanFicFare-4.2.0/setup.py new/FanFicFare-4.3.0/setup.py
--- old/FanFicFare-4.2.0/setup.py       2021-04-30 15:42:27.000000000 +0200
+++ new/FanFicFare-4.3.0/setup.py       2021-05-30 20:09:12.000000000 +0200
@@ -26,7 +26,7 @@
     name=package_name,
 
     # Versions should comply with PEP440.
-    version="4.2.0",
+    version="4.3.0",
 
     description='A tool for downloading fanfiction to eBook formats',
     long_description=long_description,
@@ -60,7 +60,6 @@
 
         # Specify the Python versions you support here. In particular, ensure
         # that you indicate whether you support Python 2, Python 3 or both.
-        'Programming Language :: Python :: 2.7',
         # Earlier py3 version may work, but I've not tested them.
         'Programming Language :: Python :: 3.7',
     ],
