How to override log4j with log4j2 to resolve "SocketServer class vulnerable to deserialization" for spark-core_2.12
How can I override log4j 1.2.17 with log4j-core 2.16.0 to resolve "SocketServer class vulnerable to deserialization" for the spark-core_2.12 binaries?
I tried to exclude log4j 1.2.17 from spark-core_2.12, but the build fails with the error below:
java.lang.NoClassDefFoundError: org/apache/log4j/spi/Filter
at com.optum.iqs.runtime.counters.CountersFactoryTest.setup(CountersFactoryTest.java:20)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.RunBefores.invokeMethod(RunBefores.java:33)
at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:24)
at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.eclipse.jdt.internal.junit4.runner.JUnit4TestReference.run(JUnit4TestReference.java:89)
at org.eclipse.jdt.internal.junit.runner.TestExecution.run(TestExecution.java:41)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:541)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:763)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:463)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:209)
Caused by: java.lang.ClassNotFoundException: org.apache.log4j.spi.Filter
at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:355)
at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
... 19 more
java.lang.NullPointerException
at com.optum.iqs.runtime.counters.CountersFactoryTest.tearDown(CountersFactoryTest.java:44)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.RunAfters.invokeMethod(RunAfters.java:46)
at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:33)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.eclipse.jdt.internal.junit4.runner.JUnit4TestReference.run(JUnit4TestReference.java:89)
at org.eclipse.jdt.internal.junit.runner.TestExecution.run(TestExecution.java:41)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:541)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:763)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:463)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:209)
The content of my CountersFactoryTest.java class is:
package com.optum.iqs.runtime.counters;

import java.io.IOException;
import java.util.Map;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

public class CountersFactoryTest {

    public static SparkSession sparkSession;

    @BeforeClass
    public static void setup() throws IOException {
        sparkSession = SparkSession.builder().appName("JunitSession").master("local").getOrCreate();
        CountersFactory.initialize(sparkSession.sparkContext());
    }

    @Test
    public void testIqsLongCounter() {
        Dataset<String> input = sparkSession.read().textFile("src/test/resources/input.txt").as(Encoders.STRING());
        Dataset<String> output = input.map(new SampleMapFunction(), Encoders.STRING());
        output.show();
        Long counter = CountersFactory.getCounters().get("input records").value();
        Assert.assertEquals(3L, counter.longValue());
    }

    @Test
    public void testIqsOutputRecordCounter() {
        Dataset<String> input = sparkSession.read().textFile("src/test/resources/input.txt").as(Encoders.STRING());
        Dataset<String> output = input.map(new SampleMapFunctionOutputCounter(), Encoders.STRING());
        output.show();
        Map<String, Long> counter = CountersFactory.getOutputCounters().get("output records").value();
        Assert.assertEquals(3L, counter.get("key").longValue());
    }

    @AfterClass
    public static void tearDown() {
        sparkSession.stop();
    }
}
For reference, here is how I excluded log4j from the spark-core dependency:
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.12</artifactId>
    <version>3.0.1</version>
    <exclusions>
        <exclusion>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
        </exclusion>
    </exclusions>
</dependency>
Is spark-core going to upgrade its log4j dependency to the latest version?
Solution 1
[Remark: CVE-2019-17571 is almost impossible to trigger unless you are running the SocketServer explicitly (cf. this question). On the other hand, Log4j 1.2.17 also suffers from CVE-2021-4104, which might be easier to exploit.]
Since Log4j 2.x is not backward compatible with Log4j 1.2, upgrading is not as simple as replacing one library with the other. You need to replace the log4j dependency with the bridge between the Log4j 1.2 and Log4j 2.x APIs (log4j-1.2-api):
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.12</artifactId>
    <version>3.0.1</version>
    <exclusions>
        <exclusion>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-1.2-api</artifactId>
    <version>2.17.2</version>
</dependency>
This should solve the ClassNotFoundException, but if you want to actually save the logs somewhere, you need to install a backend for the Log4j 2.x API. log4j-core is the standard one.
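As an illustration, a minimal sketch of adding that backend, assuming it is kept at the same version as the bridge (log4j-core pulls in log4j-api transitively):

<dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-core</artifactId>
    <version>2.17.2</version>
</dependency>

With the backend on the classpath, Log4j 2.x reads its configuration from a log4j2.xml (or log4j2.properties) file on the classpath rather than the old log4j.properties. A minimal example configuration that writes to the console and to a file (the path logs/app.log is just a placeholder) could look like this:

<Configuration status="WARN">
    <Appenders>
        <Console name="console" target="SYSTEM_OUT">
            <PatternLayout pattern="%d{ISO8601} %-5p %c: %m%n"/>
        </Console>
        <File name="file" fileName="logs/app.log">
            <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c: %m%n"/>
        </File>
    </Appenders>
    <Loggers>
        <Root level="info">
            <AppenderRef ref="console"/>
            <AppenderRef ref="file"/>
        </Root>
    </Loggers>
</Configuration>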
Sources
This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.
Source: Stack Overflow