Hi Marta, Marta Rybczynska <[email protected]> escreveu no dia segunda, 2/01/2023 à(s) 07:03:
> The database update has been done on the original file. In case of > network connection issues, temporary outage of the NVD server or > a similar situation, the function could exit with incomplete data > in the database. This patch solves the issue by performing the update > on a copy of the database. It replaces the main one only if the whole > update was successful. > > See https://bugzilla.yoctoproject.org/show_bug.cgi?id=14929 > > Reported-by: Alberto Pianon <[email protected]> > Signed-off-by: Marta Rybczynska <[email protected]> > --- > .../recipes-core/meta/cve-update-db-native.bb | 81 +++++++++++++------ > 1 file changed, 56 insertions(+), 25 deletions(-) > > diff --git a/meta/recipes-core/meta/cve-update-db-native.bb > b/meta/recipes-core/meta/cve-update-db-native.bb > index 642fda5395..89804b9e5c 100644 > --- a/meta/recipes-core/meta/cve-update-db-native.bb > +++ b/meta/recipes-core/meta/cve-update-db-native.bb > @@ -21,6 +21,8 @@ CVE_DB_UPDATE_INTERVAL ?= "86400" > # Timeout for blocking socket operations, such as the connection attempt. 
> CVE_SOCKET_TIMEOUT ?= "60" > > +CVE_DB_TEMP_FILE ?= "${CVE_CHECK_DB_DIR}/temp_nvdcve_1.1.db" > + > python () { > if not bb.data.inherits_class("cve-check", d): > raise bb.parse.SkipRecipe("Skip recipe when cve-check class is > not loaded.") > @@ -32,19 +34,15 @@ python do_fetch() { > """ > import bb.utils > import bb.progress > - import sqlite3, urllib, urllib.parse, gzip > - from datetime import date > + import shutil > > bb.utils.export_proxies(d) > > - YEAR_START = 2002 > - > db_file = d.getVar("CVE_CHECK_DB_FILE") > db_dir = os.path.dirname(db_file) > + db_tmp_file = d.getVar("CVE_DB_TEMP_FILE") > > - cve_socket_timeout = int(d.getVar("CVE_SOCKET_TIMEOUT")) > - > - cleanup_db_download(db_file) > + cleanup_db_download(db_file, db_tmp_file) > > # The NVD database changes once a day, so no need to update more > frequently > # Allow the user to force-update > @@ -62,9 +60,55 @@ python do_fetch() { > pass > > bb.utils.mkdirhier(db_dir) > + if os.path.exists(db_file): > + shutil.copy2(db_file, db_tmp_file) > + > + if update_db_file(db_tmp_file, d) == True: > + # Update downloaded correctly, can swap files > + shutil.move(db_tmp_file, db_file) > + else: > + # Update failed, do not modify the database > + bb.note("CVE database update failed") > + os.remove(db_tmp_file) > +} > + > +do_fetch[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}" > +do_fetch[file-checksums] = "" > +do_fetch[vardeps] = "" > + > +def cleanup_db_download(db_file, db_tmp_file): > + """ > + Cleanup the download space from possible failed downloads > + """ > + if os.path.exists("{0}-journal".format(db_file)): > + # If a journal is present the last update might have been > interrupted. In that case, > + # just wipe any leftovers and force the DB to be recreated. > + os.remove("{0}-journal".format(db_file)) > + > + if os.path.exists(db_file): > + os.remove(db_file) > + > + if os.path.exists("{0}-journal".format(db_tmp_file)): > + # If a journal is present the last update might have been > interrupted. 
In that case, > + # just wipe any leftovers and force the DB to be recreated. > + os.remove("{0}-journal".format(db_tmp_file)) > + > + if os.path.exists(db_tmp_file): > + os.remove(db_tmp_file) > + > It seems to me that this function is a duplication of the old version with an extra argument. So I think that using the old function version and calling it with the proper argument does the same: cleanup_db_download(db_file) cleanup_db_download(db_tmp_file) Jose > +def update_db_file(db_tmp_file, d): > + """ > + Update the given database file > + """ > + import bb.utils, bb.progress > + from datetime import date > + import urllib, gzip, sqlite3 > + > + YEAR_START = 2002 > + cve_socket_timeout = int(d.getVar("CVE_SOCKET_TIMEOUT")) > > # Connect to database > - conn = sqlite3.connect(db_file) > + conn = sqlite3.connect(db_tmp_file) > initialize_db(conn) > > with bb.progress.ProgressHandler(d) as ph, > open(os.path.join(d.getVar("TMPDIR"), 'cve_check'), 'a') as cve_f: > @@ -82,7 +126,7 @@ python do_fetch() { > except urllib.error.URLError as e: > cve_f.write('Warning: CVE db update error, Unable to > fetch CVE data.\n\n') > bb.warn("Failed to fetch CVE data (%s)" % e.reason) > - return > + return False > > if response: > for l in response.read().decode("utf-8").splitlines(): > @@ -92,7 +136,7 @@ python do_fetch() { > break > else: > bb.warn("Cannot parse CVE metadata, update failed") > - return > + return False > > # Compare with current db last modified date > cursor = conn.execute("select DATE from META where YEAR = ?", > (year,)) > @@ -113,7 +157,7 @@ python do_fetch() { > except urllib.error.URLError as e: > cve_f.write('Warning: CVE db update error, CVE data > is outdated.\n\n') > bb.warn("Cannot parse CVE data (%s), update failed" % > e.reason) > - return > + return False > else: > bb.debug(2, "Already up to date (last modified %s)" % > last_modified) > # Update success, set the date to cve_check file. 
> @@ -122,20 +166,7 @@ python do_fetch() { > > conn.commit() > conn.close() > -} > - > -do_fetch[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}" > -do_fetch[file-checksums] = "" > -do_fetch[vardeps] = "" > - > -def cleanup_db_download(db_file): > - if os.path.exists("{0}-journal".format(db_file)): > - # If a journal is present the last update might have been > interrupted. In that case, > - # just wipe any leftovers and force the DB to be recreated. > - os.remove("{0}-journal".format(db_file)) > - > - if os.path.exists(db_file): > - os.remove(db_file) > + return True > > def initialize_db(conn): > with conn: > -- > 2.35.1 > > > > > -- Best regards, José Quaresma
-=-=-=-=-=-=-=-=-=-=-=- Links: You receive all messages sent to this group. View/Reply Online (#175315): https://lists.openembedded.org/g/openembedded-core/message/175315 Mute This Topic: https://lists.openembedded.org/mt/96002809/21656 Group Owner: [email protected] Unsubscribe: https://lists.openembedded.org/g/openembedded-core/unsub [[email protected]] -=-=-=-=-=-=-=-=-=-=-=-
