Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package python-fanficfare for
openSUSE:Factory checked in at 2024-01-29 22:28:27
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-fanficfare (Old)
and /work/SRC/openSUSE:Factory/.python-fanficfare.new.1815 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-fanficfare"
Mon Jan 29 22:28:27 2024 rev:53 rq:1142076 version:4.30.0
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-fanficfare/python-fanficfare.changes
2023-11-21 21:34:35.157030116 +0100
+++
/work/SRC/openSUSE:Factory/.python-fanficfare.new.1815/python-fanficfare.changes
2024-01-29 22:29:08.185707041 +0100
@@ -1,0 +2,22 @@
+Sun Jan 28 10:24:33 UTC 2024 - Dirk Müller <[email protected]>
+
+- update to 4.30.0:
+ * Install attached plugin zip file, or use Calibre's 'Get
+ plugins' feature.
+ * FanFicFare is delivered as a `pip` Python package.
+ * Run `fanficfare -h` from command-line.
+ * Update translations.
+ * Better handling of &<> entities with stripHTML() and chapter
+ titles. #1019
+ * SB(but not SV) removed RSS link from thread list title.
+ Closes #1017
+ * adapter_storiesonlinenet: Allow /n/ as well as /s/ paths
+ * adapter_storiesonlinenet: Update for chapter URL change for
+ paid subs. See #1014, thanks bpothier!
+ * adapter_storiesonlinenet: Update for chapter URL change. See
+ #1013
+ * adapter_fanfictionnet: Only use data-original cover images.
+ * BrowserCache should ignore usecache flag, that's for
+ BasicCache.
+
+-------------------------------------------------------------------
Old:
----
FanFicFare-4.29.0.tar.gz
New:
----
FanFicFare-4.30.0.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-fanficfare.spec ++++++
--- /var/tmp/diff_new_pack.zg75sk/_old 2024-01-29 22:29:08.925733864 +0100
+++ /var/tmp/diff_new_pack.zg75sk/_new 2024-01-29 22:29:08.925733864 +0100
@@ -1,7 +1,7 @@
#
# spec file for package python-fanficfare
#
-# Copyright (c) 2023 SUSE LLC
+# Copyright (c) 2024 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -20,7 +20,7 @@
%define modnamedown fanficfare
%define skip_python2 1
Name: python-fanficfare
-Version: 4.29.0
+Version: 4.30.0
Release: 0
Summary: Tool for making eBooks from stories on fanfiction and other
web sites
License: GPL-3.0-only
@@ -49,7 +49,7 @@
Requires: python-requests-file
Requires: python-urllib3
Requires(post): update-alternatives
-Requires(postun):update-alternatives
+Requires(postun): update-alternatives
BuildArch: noarch
%python_subpackages
++++++ FanFicFare-4.29.0.tar.gz -> FanFicFare-4.30.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/FanFicFare-4.29.0/calibre-plugin/__init__.py
new/FanFicFare-4.30.0/calibre-plugin/__init__.py
--- old/FanFicFare-4.29.0/calibre-plugin/__init__.py 2023-11-01
17:41:05.000000000 +0100
+++ new/FanFicFare-4.30.0/calibre-plugin/__init__.py 2023-12-01
19:25:25.000000000 +0100
@@ -33,7 +33,7 @@
from calibre.customize import InterfaceActionBase
# pulled out from FanFicFareBase for saving in prefs.py
-__version__ = (4, 29, 0)
+__version__ = (4, 30, 0)
## Apparently the name for this class doesn't matter--it was still
## 'demo' for the first few versions.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/FanFicFare-4.29.0/calibre-plugin/fff_plugin.py
new/FanFicFare-4.30.0/calibre-plugin/fff_plugin.py
--- old/FanFicFare-4.29.0/calibre-plugin/fff_plugin.py 2023-11-01
17:41:05.000000000 +0100
+++ new/FanFicFare-4.30.0/calibre-plugin/fff_plugin.py 2023-12-01
19:25:25.000000000 +0100
@@ -1936,6 +1936,8 @@
if merge:
if len(good_list) < 1:
info_dialog(self.gui, _('FanFicFare: ')+_('No Good Stories for
Anthology'),
+ ## where -> were -- typo kept to not invalidate
existing translation.
+ ## should fix if it ever changes for other
reasons. See #1011
_('No good stories/updates where downloaded,
Anthology creation/update aborted.'),
show=True,
show_copy_button=False)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/FanFicFare-4.29.0/calibre-plugin/translations/es.po
new/FanFicFare-4.30.0/calibre-plugin/translations/es.po
--- old/FanFicFare-4.29.0/calibre-plugin/translations/es.po 2023-11-01
17:41:05.000000000 +0100
+++ new/FanFicFare-4.30.0/calibre-plugin/translations/es.po 2023-12-01
19:25:25.000000000 +0100
@@ -7,7 +7,7 @@
# Darío Hereñú, 2015-2016,2018
# Darío Hereñú, 2018
# 7d35375d9c63f2d676cbd56e8f3d4fdc, 2016
-# Jellby <[email protected]>, 2014-2022
+# Jellby <[email protected]>, 2014-2023
# Antonio Mireles <[email protected]>, 2016
# Juan Jaramillo <[email protected]>, 2016
# JimmXinu, 2015
@@ -16,7 +16,7 @@
"Project-Id-Version: calibre-plugins\n"
"POT-Creation-Date: 2023-10-21 09:40-0500\n"
"PO-Revision-Date: 2014-06-19 22:55+0000\n"
-"Last-Translator: Jellby <[email protected]>, 2014-2022\n"
+"Last-Translator: Jellby <[email protected]>, 2014-2023\n"
"Language-Team: Spanish
(http://app.transifex.com/calibre/calibre-plugins/language/es/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
@@ -696,20 +696,20 @@
#: config.py:1041
msgid "Set Calibre Cover Only for New Books"
-msgstr ""
+msgstr "Establecer portada de calibre sólo para libros nuevos"
#: config.py:1042
msgid ""
"Set the Calibre cover from EPUB only for new\n"
"books, not updates to existing books."
-msgstr ""
+msgstr "Establecer la portada de calibre a partir del EPUB sólo para libros
nuevos, no en actualizaciones de libros existentes."
#: config.py:1050
msgid ""
"Generate a Calibre book cover image when Calibre metadata is updated.<br "
"/>Note that %(gc)s(Plugin) will only run if there is a %(gc)s setting "
"configured below for Default or the appropriate site."
-msgstr ""
+msgstr "Generar una portada para el libro en calibre cuando se actualizan los
metadatos de calibre.<br />Tenga en cuenta que %(gc)s (complemento) se
ejecutará sólo si hay una opción %(gc)s configurada a continuación para
Predeterminado o el sitio adecuado."
#: config.py:1053
msgid "Generate Calibre Cover:"
@@ -727,7 +727,7 @@
#: config.py:1080
msgid "Inject/update the generated cover inside EPUB"
-msgstr ""
+msgstr "Insertar o actualizar la portada generada en el EPUB"
#: config.py:1081
msgid ""
@@ -1172,11 +1172,11 @@
#: config.py:1600
msgid "Set Calibre Series URL"
-msgstr ""
+msgstr "Establecer URL de la serie en calibre"
#: config.py:1601
msgid "Set Calibre Series URL to Series's URL on story site."
-msgstr ""
+msgstr "Establecer el URL de la serie en la página de la historia como URL de
la serie en calibre."
#: config.py:1605
msgid "Set 'Series [0]' for New Anthologies?"
@@ -2187,7 +2187,7 @@
msgid ""
"Existing epub contains %d chapters, web site only has %d. Use Overwrite or "
"force_update_epub_always to force update."
-msgstr ""
+msgstr "El epub existente contiene %d capítulos, el sitio de internet sólo
tiene %d. Use Reemplazar o force_update_epub_always para forzar la
actualización."
#: fff_plugin.py:1552 jobs.py:372
msgid ""
@@ -2249,7 +2249,7 @@
msgid ""
"FanFicFare will try to update metadata again once. Close any interfering "
"programs (such as Windows File Explorer) before closing this dialog."
-msgstr ""
+msgstr "FanFicFare intentará actualizar los metadatos una vez más. Cierre
cualquier programa que pueda interferir (como el Explorador de archivos de
Windows) antes de cerrar este cuadro de diálogo."
#: fff_plugin.py:1875 fff_plugin.py:1876
msgid "Finished Adding/Updating %d books."
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/FanFicFare-4.29.0/fanficfare/adapters/adapter_fanfictionnet.py
new/FanFicFare-4.30.0/fanficfare/adapters/adapter_fanfictionnet.py
--- old/FanFicFare-4.29.0/fanficfare/adapters/adapter_fanfictionnet.py
2023-11-01 17:41:05.000000000 +0100
+++ new/FanFicFare-4.30.0/fanficfare/adapters/adapter_fanfictionnet.py
2023-12-01 19:25:25.000000000 +0100
@@ -308,11 +308,10 @@
img = soup.select_one('img.lazy.cimage')
cover_url=img['data-original']
except:
- img = soup.select_one('img.cimage:not(.lazy)')
- if img:
- cover_url=img['src']
- ## Nov 19, 2020, ffnet lazy cover images returning 0 byte
- ## files.
+ ## Nov 2023 - src is always "/static/images/d_60_90.jpg" now
+ ## Only take cover if there's data-original
+ ## Primary motivator is to prevent unneeded author page hits.
+ pass
logger.debug("cover_url:%s"%cover_url)
authimg_url = ""
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/FanFicFare-4.29.0/fanficfare/adapters/adapter_storiesonlinenet.py
new/FanFicFare-4.30.0/fanficfare/adapters/adapter_storiesonlinenet.py
--- old/FanFicFare-4.29.0/fanficfare/adapters/adapter_storiesonlinenet.py
2023-11-01 17:41:05.000000000 +0100
+++ new/FanFicFare-4.30.0/fanficfare/adapters/adapter_storiesonlinenet.py
2023-12-01 19:25:25.000000000 +0100
@@ -60,7 +60,7 @@
if not m.group('chapter') and m.group('title'):
title = m.group('title')
# normalized story URL.
- self._setURL('https://' + self.getSiteDomain() +
'/s/'+self.story.getMetadata('storyId')+title)
+ self._setURL('https://' + self.getSiteDomain() +
'/'+m.group('path')+'/'+self.story.getMetadata('storyId')+title)
else:
raise exceptions.InvalidStoryURL(url,
self.getSiteDomain(),
@@ -84,10 +84,10 @@
@classmethod
def getSiteExampleURLs(cls):
- return "http://"+cls.getSiteDomain()+"/s/1234
http://"+cls.getSiteDomain()+"/s/1234:4010
https://"+cls.getSiteDomain()+"/s/1234
https://"+cls.getSiteDomain()+"/s/1234:4010"
+ return "https://"+cls.getSiteDomain()+"/s/1234/story-title
https://"+cls.getSiteDomain()+"/n/1234/story-title"
def getSiteURLPattern(self):
- return
r"https?://"+re.escape(self.getSiteDomain())+r"/(s|library)/(storyInfo.php\?id=)?(?P<id>\d+)(?P<chapter>:\d+)?(?P<title>/.+)?((;\d+)?$|(:i)?$)?"
+ return
r"https?://"+re.escape(self.getSiteDomain())+r"/(?P<path>s|n|library)/(storyInfo.php\?id=)?(?P<id>\d+)(?P<chapter>:\d+)?(?P<title>/.+)?((;\d+)?$|(:i)?$)?"
@classmethod
def getTheme(cls):
@@ -221,14 +221,17 @@
soup = soup.find('article')
# Find the chapters:
- chapters = soup.findAll('a',
href=re.compile(r'^/s/'+self.story.getMetadata('storyId')+r":\d+(/.*)?$"))
+ # <a href="/s/00001/This-is-a-test/1">Chapter 1</a>
+ # <a href="/n/00001/This-is-a-test/1">Chapter 1</a>
+ chapters = soup.select('div#index-list a[href*="/s/"],a[href*="/n/"]')
+ logger.debug(chapters)
if len(chapters) != 0:
logger.debug("Number of chapters: {0}".format(len(chapters)))
for chapter in chapters:
# just in case there's tags, like <i> in chapter titles.
self.add_chapter(chapter,'https://'+self.host+chapter['href'])
else:
-
self.add_chapter(self.story.getMetadata('title'),'https://'+self.host+'/s/'+self.story.getMetadata('storyId'))
+
self.add_chapter(self.story.getMetadata('title'),self.story.getMetadata('storyUrl'))
self.getStoryMetadataFromAuthorPage()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/FanFicFare-4.29.0/fanficfare/adapters/base_adapter.py
new/FanFicFare-4.30.0/fanficfare/adapters/base_adapter.py
--- old/FanFicFare-4.29.0/fanficfare/adapters/base_adapter.py 2023-11-01
17:41:05.000000000 +0100
+++ new/FanFicFare-4.30.0/fanficfare/adapters/base_adapter.py 2023-12-01
19:25:25.000000000 +0100
@@ -171,12 +171,6 @@
meta = defaultdict(unicode,othermeta) # copy othermeta
if title:
title = stripHTML(title,remove_all_entities=False)
- # Put the basic 3 html entities back in.
- # bs4 is 'helpfully' removing them.
- ## Now with more checking because bs4 is apparently
- ## not *always* removing them now.
- if '&' in title and '&amp;' not in title:
- title =
title.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')
else:
## A default value for when there's no chapter
## title. Cropped up once with adapter_novelonlinefullcom
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/FanFicFare-4.29.0/fanficfare/adapters/base_xenforo2forum_adapter.py
new/FanFicFare-4.30.0/fanficfare/adapters/base_xenforo2forum_adapter.py
--- old/FanFicFare-4.29.0/fanficfare/adapters/base_xenforo2forum_adapter.py
2023-11-01 17:41:05.000000000 +0100
+++ new/FanFicFare-4.30.0/fanficfare/adapters/base_xenforo2forum_adapter.py
2023-12-01 19:25:25.000000000 +0100
@@ -110,7 +110,8 @@
# logger.debug(desc)
title = header.find('h1',{'class':'threadmarkListingHeader-name'})
if title:
- title.a.decompose() # remove RSS link.
+ if title.a:
+ title.a.decompose() # remove RSS link.
self.story.setMetadata("threadmarks_title",stripHTML(title))
statusdt = header.find('dt',string="Index progress")
if statusdt:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/FanFicFare-4.29.0/fanficfare/adapters/base_xenforoforum_adapter.py
new/FanFicFare-4.30.0/fanficfare/adapters/base_xenforoforum_adapter.py
--- old/FanFicFare-4.29.0/fanficfare/adapters/base_xenforoforum_adapter.py
2023-11-01 17:41:05.000000000 +0100
+++ new/FanFicFare-4.30.0/fanficfare/adapters/base_xenforoforum_adapter.py
2023-12-01 19:25:25.000000000 +0100
@@ -406,7 +406,7 @@
if after:
# logger.debug("AFTER "*10)
after=False
- url,name = atag['href'],stripHTML(atag)
+ url,name =
atag['href'],stripHTML(atag,remove_all_entities=False)
date = self.get_threadmark_date(tm_item)
words,kwords = self.get_threadmark_words(tm_item)
if 'http' not in url:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/FanFicFare-4.29.0/fanficfare/cli.py
new/FanFicFare-4.30.0/fanficfare/cli.py
--- old/FanFicFare-4.29.0/fanficfare/cli.py 2023-11-01 17:41:05.000000000
+0100
+++ new/FanFicFare-4.30.0/fanficfare/cli.py 2023-12-01 19:25:25.000000000
+0100
@@ -28,7 +28,7 @@
import os, sys, platform
-version="4.29.0"
+version="4.30.0"
os.environ['CURRENT_VERSION_ID']=version
global_cache = 'global_cache'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/FanFicFare-4.29.0/fanficfare/fetchers/cache_browser.py
new/FanFicFare-4.30.0/fanficfare/fetchers/cache_browser.py
--- old/FanFicFare-4.29.0/fanficfare/fetchers/cache_browser.py 2023-11-01
17:41:05.000000000 +0100
+++ new/FanFicFare-4.30.0/fanficfare/fetchers/cache_browser.py 2023-12-01
19:25:25.000000000 +0100
@@ -52,46 +52,46 @@
with self.cache_lock:
# logger.debug("BrowserCacheDecorator fetcher_do_request")
fromcache=True
- if usecache:
- try:
- d = self.cache.get_data(url)
- parsedUrl = urlparse(url)
+ # if usecache: # Ignore usecache flag--it's for BasicCache.
+ try:
+ d = self.cache.get_data(url)
+ parsedUrl = urlparse(url)
- open_tries = 2
+ open_tries = 2
+ # logger.debug("domain_open_tries:%s:"%domain_open_tries)
+ while( fetcher.getConfig("use_browser_cache_only") and
+ fetcher.getConfig("open_pages_in_browser",False) and
+ not d and open_tries
+ and domain_open_tries.get(parsedUrl.netloc,0) <
fetcher.getConfig("open_pages_in_browser_tries_limit",6) ):
+ logger.debug("\n\nopen page in browser:
%s\ntries:%s\n"%(url,domain_open_tries.get(parsedUrl.netloc,None)))
+ webbrowser.open(url)
+ # logger.debug("domain_open_tries:%s:"%domain_open_tries)
+ # if parsedUrl.netloc not in domain_open_tries:
+ # logger.debug("First time for (%s) extra
sleep"%parsedUrl.netloc)
+ # time.sleep(10)
+ fromcache=False
+ read_try_sleeps = [2, 2, 4, 5, 6]
+ while not d and read_try_sleeps:
+ time.sleep(read_try_sleeps.pop(0))
+ logger.debug("Checking for cache...")
+ d = self.cache.get_data(url)
+ # logger.debug(d)
+ open_tries -= 1
+ domain_open_tries[parsedUrl.netloc] =
domain_open_tries.get(parsedUrl.netloc,0) + 1
# logger.debug("domain_open_tries:%s:"%domain_open_tries)
- while( fetcher.getConfig("use_browser_cache_only") and
- fetcher.getConfig("open_pages_in_browser",False) and
- not d and open_tries
- and domain_open_tries.get(parsedUrl.netloc,0) <
fetcher.getConfig("open_pages_in_browser_tries_limit",6) ):
- logger.debug("\n\nopen page in browser:
%s\ntries:%s\n"%(url,domain_open_tries.get(parsedUrl.netloc,None)))
- webbrowser.open(url)
- #
logger.debug("domain_open_tries:%s:"%domain_open_tries)
- # if parsedUrl.netloc not in domain_open_tries:
- # logger.debug("First time for (%s) extra
sleep"%parsedUrl.netloc)
- # time.sleep(10)
- fromcache=False
- read_try_sleeps = [2, 2, 4, 5, 6]
- while not d and read_try_sleeps:
- time.sleep(read_try_sleeps.pop(0))
- logger.debug("Checking for cache...")
- d = self.cache.get_data(url)
- # logger.debug(d)
- open_tries -= 1
- domain_open_tries[parsedUrl.netloc] =
domain_open_tries.get(parsedUrl.netloc,0) + 1
- #
logger.debug("domain_open_tries:%s:"%domain_open_tries)
- except Exception as e:
- logger.debug(traceback.format_exc())
- raise exceptions.BrowserCacheException("Browser Cache
Failed to Load with error '%s'"%e)
+ except Exception as e:
+ logger.debug(traceback.format_exc())
+ raise exceptions.BrowserCacheException("Browser Cache Failed
to Load with error '%s'"%e)
- # had a d = b'' which showed HIT, but failed.
- logger.debug(make_log('BrowserCache',method,url,True if d else
False))
- # logger.debug(d)
- if d:
- domain_open_tries[parsedUrl.netloc] = 0
- logger.debug("domain_open_tries:%s:"%domain_open_tries)
- logger.debug("fromcache:%s"%fromcache)
- return
FetcherResponse(d,redirecturl=url,fromcache=fromcache)
+ # had a d = b'' which showed HIT, but failed.
+ logger.debug(make_log('BrowserCache',method,url,True if d else
False))
+ # logger.debug(d)
+ if d:
+ domain_open_tries[parsedUrl.netloc] = 0
+ logger.debug("domain_open_tries:%s:"%domain_open_tries)
+ logger.debug("fromcache:%s"%fromcache)
+ return FetcherResponse(d,redirecturl=url,fromcache=fromcache)
if fetcher.getConfig("use_browser_cache_only"):
raise exceptions.HTTPErrorFFF(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/FanFicFare-4.29.0/fanficfare/htmlcleanup.py
new/FanFicFare-4.30.0/fanficfare/htmlcleanup.py
--- old/FanFicFare-4.29.0/fanficfare/htmlcleanup.py 2023-11-01
17:41:05.000000000 +0100
+++ new/FanFicFare-4.30.0/fanficfare/htmlcleanup.py 2023-12-01
19:25:25.000000000 +0100
@@ -69,6 +69,12 @@
else:
# bs4 already converts all the entities to UTF8 chars.
retval = soup.get_text(strip=True)
+ if not remove_all_entities:
+ # put basic 3 entities back
+ if '&' in retval and '&amp;' not in retval:
+ # check in case called more than once.
+ retval = retval.replace('&','&amp;')
+ retval = retval.replace('<','&lt;').replace('>','&gt;')
# some change in the python3 branch started making '\xc2\xa0'
# instead of ' '
return ensure_text(retval).replace(u'\xc2\xa0',' ').strip()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/FanFicFare-4.29.0/fanficfare/story.py
new/FanFicFare-4.30.0/fanficfare/story.py
--- old/FanFicFare-4.29.0/fanficfare/story.py 2023-11-01 17:41:05.000000000
+0100
+++ new/FanFicFare-4.30.0/fanficfare/story.py 2023-12-01 19:25:25.000000000
+0100
@@ -734,7 +734,8 @@
self.chapter_error_count = 0
self.direct_fetcher = None
-
logger.debug("use_flaresolverr_proxy:%s"%self.getConfig('use_flaresolverr_proxy'))
+ if self.getConfig('use_flaresolverr_proxy'):
+
logger.debug("use_flaresolverr_proxy:%s"%self.getConfig('use_flaresolverr_proxy'))
if self.getConfig('use_flaresolverr_proxy') == 'directimages':
from . import fetchers
fetcher = fetchers.RequestsFetcher(self.getConfig,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/FanFicFare-4.29.0/pyproject.toml
new/FanFicFare-4.30.0/pyproject.toml
--- old/FanFicFare-4.29.0/pyproject.toml 2023-11-01 17:41:05.000000000
+0100
+++ new/FanFicFare-4.30.0/pyproject.toml 2023-12-01 19:25:25.000000000
+0100
@@ -16,7 +16,7 @@
#
# For a discussion on single-sourcing the version, see
# https://packaging.python.org/guides/single-sourcing-package-version/
-version = "4.29.0"
+version = "4.30.0"
# This is a one-line description or tagline of what your project does. This
# corresponds to the "Summary" metadata field: