Hello community,

here is the log from the commit of package rpmlint-mini for openSUSE:Factory 
checked in at 2020-03-06 21:26:24
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/rpmlint-mini (Old)
 and      /work/SRC/openSUSE:Factory/.rpmlint-mini.new.26092 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "rpmlint-mini"

Fri Mar  6 21:26:24 2020 rev:98 rq:781857 version:1.10

Changes:
--------
--- /work/SRC/openSUSE:Factory/rpmlint-mini/rpmlint-mini.changes	2020-01-16 22:53:56.399489632 +0100
+++ /work/SRC/openSUSE:Factory/.rpmlint-mini.new.26092/rpmlint-mini.changes	2020-03-06 21:26:49.961533486 +0100
@@ -1,0 +2,24 @@
+Thu Mar 05 13:03:11 UTC 2020 - [email protected]
+
+- Update to version master:
+  * cron-whitelist: remove bad duplicate entry for texlive-filesystem
+  * verify.py: catch duplicate dictionary entries in JSON
+
+-------------------------------------------------------------------
+Wed Mar 04 09:37:44 UTC 2020 - [email protected]
+
+- Update to version master:
+  * cron-whitelist: add entry for sarg (bsc#1150554)
+
+-------------------------------------------------------------------
+Mon Mar 02 14:29:17 UTC 2020 - [email protected]
+
+rpmlint-security-whitelistings:
+  Update to version master:
+    * cron-whitelist: add opa-ff (bsc#1162255)
+    * cron-whitelist: add matomo (bsc#1150548)
+    * cron-whitelist: preliminary wildcard whitelisting for the notorious texlive-filesystem (bsc#1150556)
+    * cron-whitelist: whitelist new nextcloud cron job
+    * first round of whitelisting of safe cron jobs (bsc#1150175)
+
+-------------------------------------------------------------------

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ rpmlint-mini.spec ++++++
--- /var/tmp/diff_new_pack.jp5Q3E/_old  2020-03-06 21:26:53.433535405 +0100
+++ /var/tmp/diff_new_pack.jp5Q3E/_new  2020-03-06 21:26:53.469535424 +0100
@@ -1,7 +1,7 @@
 #
 # spec file for package rpmlint-mini
 #
-# Copyright (c) 2019 SUSE LLC
+# Copyright (c) 2020 SUSE LINUX GmbH, Nuernberg, Germany.
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed

++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.jp5Q3E/_old  2020-03-06 21:26:53.541535465 +0100
+++ /var/tmp/diff_new_pack.jp5Q3E/_new  2020-03-06 21:26:53.541535465 +0100
@@ -1,4 +1,4 @@
 <servicedata>
 <service name="tar_scm">
                <param name="url">https://github.com/openSUSE/rpmlint-security-whitelistings</param>
-              <param name="changesrevision">17823647a36e556ed27d389e9293900c370ca0d7</param></service></servicedata>
\ No newline at end of file
+              <param name="changesrevision">d01e019a8010f129b1f5dda34348ab8b466fa6d3</param></service></servicedata>
\ No newline at end of file

++++++ rpmlint-security-whitelistings-master.tar.xz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rpmlint-security-whitelistings-master/.github/workflows/main.yml new/rpmlint-security-whitelistings-master/.github/workflows/main.yml
--- old/rpmlint-security-whitelistings-master/.github/workflows/main.yml	1970-01-01 01:00:00.000000000 +0100
+++ new/rpmlint-security-whitelistings-master/.github/workflows/main.yml	2020-03-05 13:47:44.000000000 +0100
@@ -0,0 +1,21 @@
+name: CI
+
+on:
+  push:
+    branches:
+      - '*'
+  pull_request:
+    branches:
+      - master
+
+jobs:
+  build:
+
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Verify JSON Syntax
+      run: |
+        sudo apt-get -qq install yajl-tools >/dev/null
+        ./verify.py
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rpmlint-security-whitelistings-master/README.md new/rpmlint-security-whitelistings-master/README.md
--- old/rpmlint-security-whitelistings-master/README.md	2019-11-22 15:12:53.000000000 +0100
+++ new/rpmlint-security-whitelistings-master/README.md	2020-03-05 13:47:44.000000000 +0100
@@ -75,15 +75,15 @@
                 # issues.
                 "digests": {
 
-                    # the keys are the absolute file paths # that are subject
+                    # the keys are the absolute file paths that are the subject
                     # of the whitelisting
                     #
-                    # the values are of the form <alg>:<digest>, where <alg>
+                    # the values are of the form [alg]:[digest], where [alg]
                     # is a hash algorithm supported by the Python hashlib.
                     "/etc/cron.d/atop": 
"sha256:d8b23c4f9bda803bc8627c23361635a876bc49fc0ace0d98fcd92c7fb33ac430"
 
                     # it is also possible to explicitly whitelist a file with
-                    # arbirary content for special cases where the content of
+                    # arbitrary content for special cases where the content of
                     # the whitelisted file isn't fixed for some reason
                     "/usr/share/atop/atop.daily": "skip:<none>",
                 }
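
As an aside, here is a minimal Python sketch (for illustration only, not part of the repository; the function name is made up) of how a "digests" value in the [alg]:[digest] format described above could be produced. The file path and algorithm are just the examples from the README snippet:

import hashlib

def make_digest_entry(path, alg="sha256"):
    # hashlib.new() accepts any algorithm name supported by the local
    # Python hashlib, matching the README's description of [alg]
    h = hashlib.new(alg)
    with open(path, "rb") as fd:
        # hash the file in chunks to keep memory usage flat
        for chunk in iter(lambda: fd.read(65536), b""):
            h.update(chunk)
    return "{}:{}".format(alg, h.hexdigest())

# e.g. make_digest_entry("/etc/cron.d/atop") would yield the
# "sha256:d8b2..." value shown above
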
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rpmlint-security-whitelistings-master/cron-whitelist.json new/rpmlint-security-whitelistings-master/cron-whitelist.json
--- old/rpmlint-security-whitelistings-master/cron-whitelist.json	2019-11-22 15:12:53.000000000 +0100
+++ new/rpmlint-security-whitelistings-master/cron-whitelist.json	2020-03-05 13:47:44.000000000 +0100
@@ -9,5 +9,119 @@
                                }
                        }
                }
+       },
+       "cronie-anacron": {
+               "audits": {
+                       "bsc#1150541": {
+                               "comment": "Executes daily, weekly, monthly cron jobs",
+                               "digests": {
+                                       "/etc/cron.hourly/0anacron": "sha256:aa129d2165f669770b20d20fe5d826f242a069a8f9fc2323333b91d0c9ca40c9"
+                               }
+                       }
+               }
+       },
+       "logdigest": {
+               "audits": {
+                       "bsc#1150546": {
+                               "comment": "scours through log files to find 'interesting' information and mails it to root",
+                               "digests": {
+                                       "/etc/cron.daily/logdigest": "sha256:1dcd29ceaa35914c126bf76b0b00b955de332744636573dc2b907c1c542f81c4",
+                                       "/etc/cron.d/logdigest-all-ignores-autogeneration": "sha256:052d8f93980ade99937c2a4b972d5cf5a77367c9af6c7b02f0e05e8ca5b06797"
+                               }
+                       }
+               }
+       },
+       "patch2mail": {
+               "audits": {
+                       "bsc#1150552": {
+                               "comment": "looks up pending zypper patches and sends them out by mail",
+                               "digests": {
+                                       "/etc/cron.daily/patch2mail": "sha256:2db3aaa7addba83e60e24fdb868dd6f353bfb76b005fd2a2fdaf079fb54fe597"
+                               }
+                       }
+               }
+       },
+       "tmpwatch": {
+               "audits": {
+                       "bsc#1150557": {
+                               "comment": "removes outdated files in /tmp and some other directories",
+                               "digests": {
+                                       "/etc/cron.daily/tmpwatch": "sha256:b2e7cb31833f7737215b2bb44d9e8ab42f5b0787ca613b176a490248ad035adb"
+                               }
+                       }
+               }
+       },
+       "cacti": {
+               "audits": {
+                       "bsc#1150534": {
+                               "comment": "collects system data and stores it in a local database, complex PHP",
+                               "digests": {
+                                       "/etc/cron.d/cacti": "sha256:d7537bab09182f50b7437a267c0743a1f7fb680555c977f7811d70214083aafd"
+                               }
+                       }
+               }
+       },
+       "storeBackup": {
+               "audits": {
+                       "bsc#1150555": {
+                               "comment": "an old-school backup solution written in Perl",
+                               "digests": {
+                                       "/etc/cron.daily/storebackup": "sha256:2d4f43fb71c5f4a7e5a0f24f8b20332bff0fe0f8b57c304ab66420cd88a7bb4c"
+                               }
+                       }
+               }
+       },
+       "nextcloud": {
+               "audits": {
+                       "bsc#1162254": {
+                               "comment": "default-disabled cron job that runs PHP cleanup logic for nextcloud as an unprivileged user",
+                               "digests": {
+                                       "/etc/cron.d/nextcloud": "sha256:176aed8f9adc79cc3d0a75a223730d621cf9435d7baee03823afc558c110b76d"
+                               }
+                       }
+               }
+       },
+       "texlive-filesystem": {
+               "audits": {
+                       "bsc#1150556": {
+                               "comment": "more or less broken security concept, disagreement with the package maintainer. Due to the importance of TeX, a wildcard whitelisting applies until the matter is settled",
+                               "digests": {
+                                       "/etc/cron.daily/suse-texlive": "skip:<none>"
+                               }
+                       }
+               }
+       },
+       "matomo": {
+               "audits": {
+                       "bsc#1150548": {
+                               "comment": "a complex PHP script is called as wwwrun user",
+                               "digests": {
+                                       "/etc/cron.d/matomo-archive": "sha256:b55bb543d9e890522aaabd016e8e8c80e3d0b6529f10da3ea264348e375eaf82"
+                               }
+                       }
+               }
+       },
+       "opa-fastfabric": {
+               "audits": {
+                       "bsc#1162255": {
+                               "comment": "fibre channel management software; this cron job monitors cable health using a complex bash script and C programs",
+                               "digests": {
+                                       "/etc/cron.daily/opa-cablehealth": "sha256:7e837d9ba6f1361d63bdb5e716e1f5ce9ac774f22fa79ef32d51a9e0c925c11b"
+                               }
+                       }
+               }
+       },
+       "sarg": {
+               "audits": {
+                       "bsc#1150554": {
+                               "comment": "builds statistics based on Squid logfile metadata. Includes sarg-reports, which is SUSE specific and important for privilege dropping.",
+                               "digests": {
+                                       "/etc/cron.daily/suse.de-sarg": "sha256:d536dc68e198189149048a907ea6d56a7ee9fc732ae8fec5a4072ad06640e359",
+                                       "/etc/cron.monthly/suse.de-sarg": "sha256:d536dc68e198189149048a907ea6d56a7ee9fc732ae8fec5a4072ad06640e359",
+                                       "/etc/cron.weekly/suse.de-sarg": "sha256:d536dc68e198189149048a907ea6d56a7ee9fc732ae8fec5a4072ad06640e359",
+                                       "/usr/sbin/sarg-reports": "sha256:00ad25400bdc2031cd09f9b8f9e56c448c93b6b89a702d36dce6a385d79e637c"
+                               }
+                       }
+               }
        }
 }
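
As an aside, a minimal Python sketch (an assumption for illustration, not the actual rpmlint check; the function name is made up) of how the nested package -> audits -> bug -> "digests" structure above could be consumed. "skip:<none>" values are treated as an unconditional pass, as in the texlive-filesystem entry:

import hashlib
import json

def verify_package(whitelist_path, package):
    # load the whitelist and walk the audits recorded for one package
    with open(whitelist_path) as fd:
        whitelist = json.load(fd)

    for bug, audit in whitelist[package]["audits"].items():
        for path, expected in audit["digests"].items():
            alg, _, digest = expected.partition(":")
            if alg == "skip":
                continue  # arbitrary content explicitly whitelisted
            h = hashlib.new(alg)
            with open(path, "rb") as fd:
                h.update(fd.read())
            if h.hexdigest() != digest:
                print("{}: {} digest mismatch ({})".format(package, path, bug))

# e.g. verify_package("cron-whitelist.json", "tmpwatch")
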
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rpmlint-security-whitelistings-master/verify.py new/rpmlint-security-whitelistings-master/verify.py
--- old/rpmlint-security-whitelistings-master/verify.py	1970-01-01 01:00:00.000000000 +0100
+++ new/rpmlint-security-whitelistings-master/verify.py	2020-03-05 13:47:44.000000000 +0100
@@ -0,0 +1,89 @@
+#!/usr/bin/python3
+
+import os, sys
+import subprocess
+import glob
+import json
+
+def printerr(*args, **kwargs):
+
+       kwargs["file"] = sys.stderr
+
+       print(*args, **kwargs)
+
+def checkJSONFile(path):
+
+       with open(path, 'rb') as json_fd:
+               try:
+
+                       print("Checking", path, "... ", end = '')
+                       sys.stdout.flush()
+
+                       res = subprocess.call(
+                               ["json_verify"],
+                               close_fds = True,
+                               shell = False,
+                               stdin = json_fd
+                       )
+
+                       if res == 0:
+                               return True
+                       else:
+                               print(path, "is not valid JSON!")
+                               return False
+
+               except FileNotFoundError:
+
+                       printerr("Couldn't find json_verify. You need to install yajl (yet-another-json-lib).")
+                       sys.exit(2)
+
+def checkDuplicateEntries(path):
+
+       # the JSON spec does allow duplicate dictionary keys to appear, and
+       # the Python json parser handles this in some (undefined) way; no
+       # error is raised.
+       #
+       # therefore hook into the parser to detect duplicate keys and error
+       # out in this case
+
+       errors = []
+
+       def check_duplicates(ordered_pairs):
+
+               seen = set()
+
+               for k, _ in ordered_pairs:
+                       if k in seen:
+                               # don't raise an exception right away; we want
+                               # to collect all duplicates, not just the first
+                               # one
+                               errors.append(
+                                       "duplicate dictionary key in {} encountered: {}".format(path, k)
+                               )
+                       else:
+                               seen.add(k)
+
+               return ordered_pairs
+
+       with open(path, 'rb') as json_fd:
+
+               data = json.load(json_fd, object_pairs_hook = check_duplicates)
+
+       for error in errors:
+               printerr(error)
+
+       return len(errors) == 0
+
+our_dir = os.path.dirname(os.path.realpath(__file__))
+pathspec = os.path.join( our_dir, "*.json" )
+res = 0
+
+for json_file in glob.glob(pathspec):
+
+       if not checkJSONFile(json_file):
+               res = 1
+
+       if not checkDuplicateEntries(json_file):
+               res = 1
+
+sys.exit(res)

