janjwerner-confluent commented on code in PR #15447: URL: https://github.com/apache/druid/pull/15447#discussion_r1410922998
########## owasp-dependency-check-suppressions.xml: ########## @@ -18,473 +18,137 @@ ~ under the License. --> <suppressions xmlns="https://jeremylong.github.io/DependencyCheck/dependency-suppression.1.3.xsd"> + <!-- False positives --> <suppress> - <!-- druid-indexing-hadoop.jar is mistaken for hadoop --> <notes><![CDATA[ - file name: org.apache.druid:druid-indexing-hadoop - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.apache\.druid/druid\-indexing\-hadoop@.*$</packageUrl> - <cve>CVE-2012-4449</cve> - <cve>CVE-2017-3162</cve> - <cve>CVE-2018-8009</cve> - <cve>CVE-2022-26612</cve> - </suppress> - <suppress> - <!-- druid-processing.jar is mistaken for org.processing:processing --> - <notes><![CDATA[ - file name: org.apache.druid:druid-processing - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.apache\.druid/druid\-processing@.*$</packageUrl> - <cve>CVE-2018-1000840</cve> - </suppress> - <suppress> - <!-- These CVEs are for the python SDK, but Druid uses the Java SDK --> - <notes><![CDATA[ - file name: openstack-swift - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.apache\.jclouds\.api/openstack\-swift@.*$</packageUrl> - <cve>CVE-2013-7109</cve> - <cve>CVE-2016-0737</cve> - <cve>CVE-2016-0738</cve> - <cve>CVE-2017-16613</cve> - </suppress> - <suppress> - <notes><![CDATA[ - file name: openstack-keystone-1.9.1.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.apache\.jclouds\.api/openstack\-keystone@.*$</packageUrl> - <!-- These CVEs are for the python SDK, but Druid uses the Java SDK --> - <cve>CVE-2015-7546</cve> - <cve>CVE-2020-12689</cve> - <cve>CVE-2020-12690</cve> - <cve>CVE-2020-12691</cve> - - <!-- This CVE affects the server --> - <cve>CVE-2021-3563</cve> - </suppress> - - <suppress> - <notes><![CDATA[ - file name: json-path-2.3.0.jar + file name: json-path-2.3.0.jar jackson-core-2.12.7.jar ]]></notes> - <packageUrl regex="true">^pkg:maven/net\.minidev/json\-path@.*$</packageUrl> <cve>CVE-2022-45688</cve> + 
<cve>CVE-2023-35116</cve> </suppress> <suppress> - <!-- - the suppressions here aren't currently applicable, but can be resolved once we update the version - --> <notes><![CDATA[ - file name: jackson-databind-2.10.5.1.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/com\.fasterxml\.jackson\.core/jackson\-databind@.*$</packageUrl> - <!-- CVE-2022-42003 and CVE-2022-42004 are related to UNWRAP_SINGLE_VALUE_ARRAYS which we do not use - https://nvd.nist.gov/vuln/detail/CVE-2022-42003 - https://nvd.nist.gov/vuln/detail/CVE-2022-42004 - --> - <cve>CVE-2022-42003</cve> - <cve>CVE-2022-42004</cve> + file name: grpc-context-1.27.2.jar + ]]></notes> + <packageUrl regex="true">^pkg:maven/io\.grpc/[email protected]$</packageUrl> + <cve>CVE-2023-4785</cve> <!-- Not applicable to gRPC Java - https://nvd.nist.gov/vuln/detail/CVE-2023-4785 --> + <cve>CVE-2023-33953</cve> <!-- Not applicable to gRPC Java - https://cloud.google.com/support/bulletins#gcp-2023-022 --> + <cve>CVE-2023-32732</cve> </suppress> <suppress> - <!-- Pulled in by io.kubernetes:client-java and kafka_2.13 but not fixed in either place yet --> - <!-- jose4j before v0.9.3 allows attackers to set a low iteration count of 1000 or less --> <notes><![CDATA[ - file name: jose4j-0.7.3.jar + file name: commons-compress-1.23.0.jar ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.bitbucket\.b_c/jose4j@.*$</packageUrl> - <cve>CVE-2023-31582</cve> + <cve>CVE-2023-42503</cve> </suppress> <suppress> - <!-- Not much for us to do as a user of the client lib, and no patch is available, - see https://github.com/kubernetes/kubernetes/issues/97076 --> <notes><![CDATA[ - file name: client-java-10.0.1.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/io\.kubernetes/client\-java.*@10.0.1$</packageUrl> - <cve>CVE-2020-8554</cve> + file name: guava-31.1-jre.jar + ]]></notes> + <cve>CVE-2020-8908</cve> </suppress> - <!-- FIXME: These are suppressed so that CI can enforce that no new vulnerable dependencies are added. 
--> - <suppress> - <!-- - ~ TODO: Fix by updating hibernate-validator. - - ~ Note hibernate-validator:5.3.1 introduces a change that requires an EL implementation to be in the classpath: - ~ https://developer.jboss.org/wiki/HibernateValidatorMigrationGuide#jive_content_id_531Final - ~ - ~ For example, updating hibernate-validator causes hadoop ingestion tasks to fail: - ~ - ~ Error: com.google.inject.CreationException: Unable to create injector, see the following errors: - ~ - ~ 1) An exception was caught and reported. Message: HV000183: Unable to initialize 'javax.el.ExpressionFactory'. Check that you have the EL dependencies on the classpath, or use ParameterMessageInterpolator instead - ~ at com.google.inject.internal.InjectorShell$Builder.build(InjectorShell.java:138) - ~ - ~ 2) No implementation for javax.validation.Validator was bound. - ~ at org.apache.druid.guice.ConfigModule.configure(ConfigModule.java:39) - ~ - ~ 2 errors - ~ at com.google.inject.internal.Errors.throwCreationExceptionIfErrorsExist(Errors.java:470) - ~ at com.google.inject.internal.InternalInjectorCreator.initializeStatically(InternalInjectorCreator.java:155) - ~ at com.google.inject.internal.InternalInjectorCreator.build(InternalInjectorCreator.java:107) - ~ at com.google.inject.Guice.createInjector(Guice.java:99) - ~ at com.google.inject.Guice.createInjector(Guice.java:73) - ~ at org.apache.druid.guice.GuiceInjectors.makeStartupInjector(GuiceInjectors.java:56) - ~ at org.apache.druid.indexer.HadoopDruidIndexerConfig.<clinit>(HadoopDruidIndexerConfig.java:102) - ~ at org.apache.druid.indexer.HadoopDruidIndexerMapper.setup(HadoopDruidIndexerMapper.java:53) - ~ at org.apache.druid.indexer.DetermineHashedPartitionsJob$DetermineCardinalityMapper.setup(DetermineHashedPartitionsJob.java:279) - ~ at org.apache.druid.indexer.DetermineHashedPartitionsJob$DetermineCardinalityMapper.run(DetermineHashedPartitionsJob.java:334) - ~ at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787) - ~ 
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341) - ~ at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:175) - ~ at java.security.AccessController.doPrivileged(Native Method) - ~ at javax.security.auth.Subject.doAs(Subject.java:422) - ~ at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1844) - ~ at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:169) - ~ Caused by: javax.validation.ValidationException: HV000183: Unable to initialize 'javax.el.ExpressionFactory'. Check that you have the EL dependencies on the classpath, or use ParameterMessageInterpolator instead - ~ at org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator.buildExpressionFactory(ResourceBundleMessageInterpolator.java:102) - ~ at org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator.<init>(ResourceBundleMessageInterpolator.java:45) - ~ at org.hibernate.validator.internal.engine.ConfigurationImpl.getDefaultMessageInterpolator(ConfigurationImpl.java:423) - ~ at org.hibernate.validator.internal.engine.ConfigurationImpl.getDefaultMessageInterpolatorConfiguredWithClassLoader(ConfigurationImpl.java:575) - ~ at org.hibernate.validator.internal.engine.ConfigurationImpl.getMessageInterpolator(ConfigurationImpl.java:364) - ~ at org.hibernate.validator.internal.engine.ValidatorFactoryImpl.<init>(ValidatorFactoryImpl.java:148) - ~ at org.hibernate.validator.HibernateValidator.buildValidatorFactory(HibernateValidator.java:38) - ~ at org.hibernate.validator.internal.engine.ConfigurationImpl.buildValidatorFactory(ConfigurationImpl.java:331) - ~ at javax.validation.Validation.buildDefaultValidatorFactory(Validation.java:110) - ~ at org.apache.druid.guice.ConfigModule.configure(ConfigModule.java:39) - ~ at com.google.inject.spi.Elements$RecordingBinder.install(Elements.java:340) - ~ at com.google.inject.spi.Elements.getElements(Elements.java:110) - ~ at 
com.google.inject.internal.InjectorShell$Builder.build(InjectorShell.java:138) - ~ at com.google.inject.internal.InternalInjectorCreator.build(InternalInjectorCreator.java:104) - ~ ... 14 more - ~ Caused by: java.lang.NoSuchMethodError: javax.el.ExpressionFactory.newInstance()Ljavax/el/ExpressionFactory; - ~ at org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator.buildExpressionFactory(ResourceBundleMessageInterpolator.java:98) - ~ ... 27 more - --> - <notes><![CDATA[ - file name: hibernate-validator-5.3.6.Final.jar - file name: hibernate-validator-5.2.5.Final.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.hibernate/hibernate\-validator@.*$</packageUrl> - <cve>CVE-2017-7536</cve> - <cve>CVE-2019-10219</cve> <!-- We don't use SafeHtml validator annotation https://nvd.nist.gov/vuln/detail/CVE-2019-10219 --> - <cve>CVE-2019-14900</cve> <!-- Not applicable to hibernate validator https://github.com/hibernate/hibernate-orm/pull/3438 --> - <cve>CVE-2020-10693</cve> <!-- We don't take user input in constraint violation message https://hibernate.atlassian.net/browse/HV-1774 --> - <cve>CVE-2020-25638</cve> - </suppress> - <suppress> - <!-- TODO: Fix by updating curator-x-discovery to > 4.2.0 and updating hadoop --> - <notes><![CDATA[ - file name: jackson-mapper-asl-1.9.13.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.codehaus\.jackson/jackson\-mapper\[email protected]$</packageUrl> - <cvssBelow>10</cvssBelow> <!-- suppress all CVEs for jackson-mapper-asl:1.9.13 ince it is via curator-x-discovery --> - </suppress> - <suppress> - <!-- TODO: Fix by updating org.apache.druid.java.util.http.client.NettyHttpClient to use netty 4 --> - <notes><![CDATA[ - file name: netty-3.10.6.Final.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/io\.netty/[email protected]$</packageUrl> - <cve>CVE-2019-16869</cve> - <cve>CVE-2019-20444</cve> - <cve>CVE-2019-20445</cve> - <cve>CVE-2020-11612</cve> - <cve>CVE-2021-21290</cve> <!-- We 
don't use HttpPostRequestDecoder or HttpPostMultiPartRequestDecoder which uses vulnerable AbstractDiskHttpData - https://github.com/advisories/GHSA-5mcr-gq6c-3hq2 --> - <cve>CVE-2021-21295</cve> <!-- We don't use HTTP2MultiplexCodec or Http2FrameCodec or Http2StreamFrameToHttpObjectCodec affected or convert HTTP/2 to HTTP/1.1 requests - https://github.com/advisories/GHSA-wm47-8v5p-wjpj --> - <cve>CVE-2021-21409</cve> <!-- We don't use Http2HeaderFrame or convert HTTP/2 to HTTP/1.1 requests https://github.com/advisories/GHSA-f256-j965-7f32 --> - <cve>CVE-2021-37136</cve> - <cve>CVE-2021-37137</cve> - <cve>CVE-2021-43797</cve> <!-- We don't decode user HTTP requests nor forward them to remote systems, we also don't support for java 6 or lower - https://github.com/advisories/GHSA-wx5j-54mm-rqqq --> - <cve>CVE-2022-24823</cve> <!-- We don't decode user HTTP requests nor forward them to remote systems, we also don't support for java 6 or lower - https://github.com/advisories/GHSA-269q-hmxg-m83q --> - <cve>CVE-2022-41881</cve> - <cve>CVE-2023-34462</cve> <!-- Suppressed since netty requests in Druid are internal, and not user-facing --> - </suppress> - <suppress> - <!-- TODO: Fix by upgrading hadoop-auth version --> - <notes><![CDATA[ - file name: nimbus-jose-jwt-4.41.1.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/com\.nimbusds/nimbus\-jose\[email protected]$</packageUrl> - <cve>CVE-2019-17195</cve> - </suppress> - <suppress> - <!-- This CVE is a false positive. 
The CVE is not for apacheds-i18n --> - <notes><![CDATA[ - file name: apacheds-i18n-2.0.0-M15.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.apache\.directory\.server/apacheds\-i18n@.*$</packageUrl> - <cve>CVE-2020-7791</cve> - </suppress> - <suppress> - <!-- TODO: Fix by using com.datastax.oss:java-driver-core instead of com.netflix.astyanax:astyanax in extensions-contrib/cassandra-storage --> - <notes><![CDATA[ - file name: libthrift-0.6.1.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.apache\.thrift/[email protected]$</packageUrl> - <cve>CVE-2016-5397</cve> - <cve>CVE-2018-1320</cve> - <cve>CVE-2019-0205</cve> - </suppress> - <suppress> - <!-- TODO: Fix by using com.datastax.oss:java-driver-core instead of com.netflix.astyanax:astyanax in extensions-contrib/cassandra-storage --> - <notes><![CDATA[ - file name: jettison-1.*.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.codehaus\.jettison/jettison@1.*$</packageUrl> - <cve>CVE-2022-40149</cve> - <cve>CVE-2022-40150</cve> - <cve>CVE-2022-45685</cve> - <cve>CVE-2022-45693</cve> - <cve>CVE-2023-1436</cve> + <!-- CVE-2022-4244 affects the plexus-utils package; + plexus-interpolation is wrongly matched - https://github.com/jeremylong/DependencyCheck/issues/5973 --> + <suppress base="true"> + <packageUrl regex="true">^pkg:maven/org\.codehaus\.plexus/plexus-interpolation@.*$</packageUrl> + <cve>CVE-2022-4244</cve> + <cve>CVE-2022-4245</cve> + </suppress> + <suppress> - <!-- We need to wait for 17.0.0 of https://github.com/kubernetes-client/java/releases --> - <!-- We need to update several other components to move to Snakeyaml 2.0 to address CVE-2022-1471 --> - <!-- Snakeyaml 1.33 added to dependencyManagement in main pom file --> + <!-- This presumably applies to the Maven build system --> <notes><![CDATA[ - file name: snakeyaml-1.33.jar - ]]></notes> - <cve>CVE-2022-1471</cve> - <!-- false positive --> - <cve>CVE-2023-2251</cve> - <cve>CVE-2022-3064</cve> + file name: maven-settings 
+ ]]></notes> + <cve>CVE-2021-26291</cve> </suppress> + <suppress> + <!-- LDAP authentication check bypass; suppressed as a false positive — no exploitability analysis performed --> <notes><![CDATA[ - file name: htrace-core4-4.0.1-incubating.jar (shaded: com.fasterxml.jackson.core:jackson-annotations:2.4.0) - ]]></notes> - <packageUrl regex="true">^pkg:maven/com\.fasterxml\.jackson\.core/jackson\[email protected]$</packageUrl> - <cvssBelow>10</cvssBelow> <!-- suppress all CVEs for jackson-annotations:2.4.0 since it is via htrace-core4 --> + file name: derby-10.14.2.0.jar + ]]></notes> + <cve>CVE-2022-46337</cve> </suppress> + <suppress> + <!-- False positive; fixed in 9.4.52 + https://nvd.nist.gov/vuln/detail/CVE-2023-36479 --> <notes><![CDATA[ - file name: htrace-core4-4.0.1-incubating.jar (shaded: com.fasterxml.jackson.core:jackson-core:2.4.0) - ]]></notes> - <packageUrl regex="true">^pkg:maven/com\.fasterxml\.jackson\.core/jackson\[email protected]$</packageUrl> - <cvssBelow>10</cvssBelow> <!-- suppress all CVEs for jackson-core:2.4.0 since it is via htrace-core4 --> + file name: jetty-servlets-9.4.53.v20231009.jar + ]]></notes> + <cve>CVE-2023-36479</cve> </suppress> + <suppress> <!-- - ~ TODO: Fix by updating hadoop-common used by extensions-core/parquet-extensions. Possibly need to change - ~ HdfsStorageDruidModule.configure()->FileSystem.get(conf) as well. 
+ the suppressions here aren't currently applicable, but can be resolved once we update the version --> <notes><![CDATA[ - file name: htrace-core4-4.0.1-incubating.jar (shaded: com.fasterxml.jackson.core:jackson-databind:2.4.0) + file name: jackson-databind-2.10.5.1.jar ]]></notes> - <packageUrl regex="true">^pkg:maven/com\.fasterxml\.jackson\.core/jackson\[email protected]$</packageUrl> - <cve>CVE-2018-14721</cve> <!-- cvss of 10.0 --> - <cvssBelow>10</cvssBelow> <!-- suppress all CVEs for jackson-databind:2.4.0 since it is via htrace-core4 --> + <packageUrl regex="true">^pkg:maven/com\.fasterxml\.jackson\.core/jackson\-databind@.*$</packageUrl> + <!-- CVE-2022-42003 and CVE-2022-42004 are related to UNWRAP_SINGLE_VALUE_ARRAYS which we do not use + https://nvd.nist.gov/vuln/detail/CVE-2022-42003 + https://nvd.nist.gov/vuln/detail/CVE-2022-42004 + --> + <cve>CVE-2022-42003</cve> + <cve>CVE-2022-42004</cve> </suppress> + <suppress> - <!-- - ~ TODO: Fix by updating parquet version in extensions-core/parquet-extensions. - --> + <!-- Avatica server itself is not affected. Vulnerability exists only on client. 
--> <notes><![CDATA[ - file name: parquet-jackson-1.11.0.jar (shaded: com.fasterxml.jackson.core:jackson-{core,databind}:2.9.10) + file name: avatica-server-1.23.0.jar ]]></notes> - <packageUrl regex="true">^pkg:maven/com\.fasterxml\.jackson\.core/jackson\-.*@2.9.10$</packageUrl> - <cvssBelow>10</cvssBelow> <!-- suppress all CVEs for jackson-{core,databind}:2.9.0 since it is via parquet transitive dependencies --> + <cve>CVE-2022-36364</cve> + <cve>CVE-2022-39135</cve> + <cve>CVE-2020-13955</cve> </suppress> - <suppress> - <notes><![CDATA[ - file name: node-sass:4.13.1 - The vulnerability is fixed in 4.13.1: https://github.com/sass/node-sass/issues/2816#issuecomment-575136455 - - But the dependency check plugin thinks it's still broken as the affected/fixed versions has not been updated on - Sonatype OSS Index: https://ossindex.sonatype.org/vuln/c97f4ae7-be1f-4f71-b238-7c095b126e74 - ]]></notes> - <packageUrl regex="true">^pkg:npm/node\-sass@.*$</packageUrl> - <vulnerabilityName>CWE-400: Uncontrolled Resource Consumption ('Resource Exhaustion')</vulnerabilityName> - </suppress> - <suppress> - <!-- - ~ TODO: Fix when Apache Ranger 2.1 is released - --> - <notes><![CDATA[ - file name: kafka_2.11-2.0.0.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.apache\.kafka/[email protected]$</packageUrl> - <cve>CVE-2019-12399</cve> - <cve>CVE-2018-17196</cve> - </suppress> - <suppress> - <!-- - ~ TODO: Fix when Apache Ranger 2.1 is released - - transitive dep from apache-ranger, upgrading to 2.1.0 adds other CVEs, staying at ranger 2.0.0 for now - --> - <notes><![CDATA[ - file name: kafka-clients-2.0.0.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.apache\.kafka/[email protected]$</packageUrl> - <cve>CVE-2019-12399</cve> - <cve>CVE-2018-17196</cve> - <cve>CVE-2023-25194</cve> - </suppress> - <suppress> - <notes><![CDATA[ - file name: kafka-clients-3.2.0.jar - ]]></notes> - <packageUrl 
regex="true">^pkg:maven/org\.apache\.kafka/kafka\-clients@.*$</packageUrl> - <cve>CVE-2022-34917</cve> - </suppress> - <suppress> - <!-- - ~ ambari-metrics-emitter, druid-ranger-security - --> - <notes><![CDATA[ - file name: log4j-1.2.17.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/log4j/[email protected]$</packageUrl> - <cve>CVE-2019-17571</cve> - <cve>CVE-2021-4104</cve> - <cve>CVE-2020-9493</cve> - <cve>CVE-2022-23307</cve> - <cve>CVE-2022-23305</cve> - <cve>CVE-2022-23302</cve> - <cve>CVE-2023-26464</cve> - </suppress> + <!-- DoS when using expression evaluator.guess --> <suppress> <notes><![CDATA[ - file name: log4j-core-2.17.1.jar + file name: janino-3.1.9.jar ]]></notes> - <packageUrl regex="true">^pkg:maven/org.apache.logging.log4j/[email protected]$</packageUrl> - <cve>CVE-2022-33915</cve> - </suppress> - <suppress> - <notes><![CDATA[ - file name: ambari-metrics-common-2.7.0.0.0.jar - ]]></notes> - <cve>CVE-2022-45855</cve> - <cve>CVE-2022-42009</cve> - <!-- Suppress hadoop CVEs that not applicable to hadoop-annotations --> - <cve>CVE-2022-25168</cve> <!-- Affected FileUtil.unTar(File, File) API isn't present in hadoop-annotations --> - <cve>CVE-2021-33036</cve> <!-- Only applicable to hadoop-yarn-server --> - </suppress> - <suppress> - <!-- - - TODO: The lastest version of ambari-metrics-common is 2.7.0.0.0, released in July 2018. 
- --> - <notes><![CDATA[ - file name: ambari-metrics-common-2.7.0.0.0.jar (shaded: io.netty:netty:3.10.5.Final) - ]]></notes> - <packageUrl regex="true">^pkg:maven/io\.netty/[email protected]$</packageUrl> - <cve>CVE-2019-16869</cve> - <cve>CVE-2019-20444</cve> - <cve>CVE-2019-20445</cve> - <cve>CVE-2021-37136</cve> - <cve>CVE-2021-37137</cve> - <cve>CVE-2021-4104</cve> - <cve>CVE-2020-9493</cve> - <cve>CVE-2022-23307</cve> - <cve>CVE-2022-23305</cve> - <cve>CVE-2022-23302</cve> - <cve>CVE-2022-41881</cve> - <cve>CVE-2020-11612</cve> - </suppress> - <suppress> - <!-- - - TODO: The lastest version of ambari-metrics-common is 2.7.0.0.0, released in July 2018. - --> - <notes><![CDATA[ - file name: ambari-metrics-common-2.7.0.0.0.jar (shaded: org.apache.hadoop:hadoop-annotations:2.6.0) - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.apache\.hadoop/hadoop\-annotations@.*$</packageUrl> - <cve>CVE-2015-1776</cve> - <cve>CVE-2016-3086</cve> - <cve>CVE-2016-5393</cve> - <cve>CVE-2016-6811</cve> - <cve>CVE-2017-3162</cve> - <cve>CVE-2018-11768</cve> - <cve>CVE-2018-1296</cve> - <cve>CVE-2018-8009</cve> - <cve>CVE-2018-8029</cve> - </suppress> - <suppress> - <notes><![CDATA[ - file name: hadoop-*-3.3.1.jar - ]]></notes> - <cve>CVE-2018-11765</cve> - <cve>CVE-2020-9492</cve> - <cve>CVE-2021-31684</cve> - <cve>CVE-2021-35517</cve> - <cve>CVE-2021-35516</cve> - <cve>CVE-2021-35515</cve> - <cve>CVE-2021-36090</cve> - <cve>CVE-2022-2048</cve> - <cve>CVE-2022-3509</cve> - <cve>CVE-2022-40152</cve> - </suppress> - <suppress> - <!-- The CVE is not applicable to kafka-clients. --> - <notes><![CDATA[ - file name: kafka-clients-2.8.0.jar - ]]></notes> - <cve>CVE-2021-26291</cve> - </suppress> - <suppress until="2021-12-30"> - <!-- Suppress this until https://github.com/apache/druid/issues/11028 is resolved. --> - <notes><![CDATA[ - This vulnerability should be fixed soon and the suppression should be removed. 
- ]]></notes> - <cve>CVE-2020-13949</cve> + <cve>CVE-2023-33546</cve> </suppress> <suppress> - <!-- (ranger, ambari, and aliyun-oss) these vulnerabilities are legit, but their latest releases still use the vulnerable jackson version --> - <notes><![CDATA[ - file name: jackson-xc-1.9.x.jar or jackson-jaxrs-1.9.x.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.codehaus\.jackson/jackson-(xc|jaxrs)@1.9.*$</packageUrl> - <cve>CVE-2018-14718</cve> - <cve>CVE-2018-7489</cve> - <cve>CVE-2022-42003</cve> - <cve>CVE-2022-42004</cve> - </suppress> - <suppress> - <!-- aliyun-oss --> - <notes><![CDATA[ - file name: ini4j-0.5.4.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.ini4j/ini4j@.*$</packageUrl> - <vulnerabilityName>CVE-2022-41404</vulnerabilityName> - </suppress> - <suppress> - <!-- Transitive dependency from apache-ranger, latest ranger version 2.1.0 still uses solr 7.7.1--> + <!-- from extensions using hadoop-client-runtime, these dependencies are shaded in the jar --> <notes><![CDATA[ - file name: solr-solrj-7.7.1.jar + file name: hadoop-client-runtime-3.3.6.jar ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.apache\.solr/[email protected]$</packageUrl> - <cve>CVE-2020-13957</cve> - <cve>CVE-2019-17558</cve> - <cve>CVE-2019-0193</cve> - <cve>CVE-2020-13941</cve> - <cve>CVE-2021-29943</cve> - <cve>CVE-2021-27905</cve> - <cve>CVE-2021-29262</cve> - <cve>CVE-2021-44548</cve> + <!-- this one is windows only - https://nvd.nist.gov/vuln/detail/CVE-2022-26612 --> + <cve>CVE-2022-26612</cve> + <!-- this one seems to apply to backend server - https://nvd.nist.gov/vuln/detail/CVE-2023-25613 --> + <cve>CVE-2023-25613</cve> + <cve>CVE-2023-2976</cve> <!-- hadoop-client-runtime isn't using com.google.common.io.FileBackedOutputStream --> + <!-- CVE from shaded dependency nimbus-jose-jwt, fixed in upcoming Hadoop release version - + https://github.com/apache/hadoop/commit/ad49ddda0e1d9632c8c9fcdc78fca8244e1248c9 --> + <cve>CVE-2023-1370</cve> + 
<cve>CVE-2023-37475</cve> <!-- Suppressing since CVE wrongly linked to apache:avro project - https://github.com/jeremylong/DependencyCheck/issues/5843 --> + <cve>CVE-2023-39410</cve> <!-- This seems to be a legitimate vulnerability. But there is no fix as of yet in Hadoop repo --> + <cve>CVE-2023-44487</cve> <!-- Occurs in the version of Jetty used by Hadoop, but it hasn't been fixed by Hadoop yet--> + <cve>CVE-2023-36478</cve> <!-- Occurs in the version of Jetty used by Hadoop, but it hasn't been fixed by Hadoop yet--> </suppress> + <!-- these are false positives; no other tools report any of those CVEs in the hadoop package --> <suppress> - <!-- Transitive dependency from aliyun-sdk-oss, there is currently no newer version of jdom2 as well--> - <notes><![CDATA[ - file name: jdom2-2.0.6.jar - ]]></notes> - <packageUrl regex="true">^pkg:maven/org\.jdom/[email protected]$</packageUrl> - <cve>CVE-2021-33813</cve> - </suppress> + <notes><![CDATA[ + file name: hadoop-*-3.3.1.jar Review Comment: do we need additional justification for those? -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected] --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
