I am trying to set up Spark locally, but I am getting the following error:

Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/  '_/
   /___/ .__/\_,_/_/ /_/\_\   version 3.2.1
      /_/

Using Scala version 2.13.5 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_333)
Type in expressions to have them evaluated.
Type :help for more information.
22/05/13 11:13:50 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
22/05/13 11:13:52 ERROR SparkContext: Error initializing SparkContext.
java.lang.reflect.InvocationTargetException
  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
  at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
  at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
  at org.apache.spark.executor.Executor.addReplClassLoaderIfNeeded(Executor.scala:909)
  at org.apache.spark.executor.Executor.(Executor.scala:160)
  at org.apache.spark.scheduler.local.LocalEndpoint.(LocalSchedulerBackend.scala:64)
  at org.apache.spark.scheduler.local.LocalSchedulerBackend.start(LocalSchedulerBackend.scala:132)
  at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:220)
  at org.apache.spark.SparkContext.(SparkContext.scala:581)
  at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2690)
  at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:949)
  at scala.Option.getOrElse(Option.scala:201)
  at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:943)
  at org.apache.spark.repl.Main$.createSparkSession(Main.scala:114)
  at $line3.$read$$iw.(:5)
  at $line3.$read.(:4)
  at $line3.$read$.()
  at $line3.$eval$.$print$lzycompute(:6)
  at $line3.$eval$.$print(:5)
  at $line3.$eval.$print()
  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:498)
  at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:670)
  at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1006)
  at scala.tools.nsc.interpreter.IMain.$anonfun$doInterpret$1(IMain.scala:506)
  at scala.reflect.internal.util.ScalaClassLoader.asContext(ScalaClassLoader.scala:36)
  at scala.reflect.internal.util.ScalaClassLoader.asContext$(ScalaClassLoader.scala:116)
  at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:43)
  at scala.tools.nsc.interpreter.IMain.loadAndRunReq$1(IMain.scala:505)
  at scala.tools.nsc.interpreter.IMain.$anonfun$doInterpret$3(IMain.scala:519)
  at scala.tools.nsc.interpreter.IMain.doInterpret(IMain.scala:519)
  at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:503)
  at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:501)
  at scala.tools.nsc.interpreter.IMain.$anonfun$quietRun$1(IMain.scala:216)
  at scala.tools.nsc.interpreter.shell.ReplReporterImpl.withoutPrintingResults(Reporter.scala:64)
  at scala.tools.nsc.interpreter.IMain.quietRun(IMain.scala:216)
  at scala.tools.nsc.interpreter.shell.ILoop.$anonfun$interpretPreamble$1(ILoop.scala:924)
  at scala.collection.immutable.List.foreach(List.scala:333)
  at scala.tools.nsc.interpreter.shell.ILoop.interpretPreamble(ILoop.scala:924)
  at scala.tools.nsc.interpreter.shell.ILoop.$anonfun$run$3(ILoop.scala:963)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
  at scala.tools.nsc.interpreter.shell.ILoop.echoOff(ILoop.scala:90)
  at scala.tools.nsc.interpreter.shell.ILoop.$anonfun$run$2(ILoop.scala:963)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
  at scala.tools.nsc.interpreter.IMain.withSuppressedSettings(IMain.scala:1406)
  at scala.tools.nsc.interpreter.shell.ILoop.$anonfun$run$1(ILoop.scala:954)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
  at scala.tools.nsc.interpreter.shell.ReplReporterImpl.withoutPrintingResults(Reporter.scala:64)
  at scala.tools.nsc.interpreter.shell.ILoop.run(ILoop.scala:954)
  at org.apache.spark.repl.Main$.doMain(Main.scala:84)
  at org.apache.spark.repl.Main$.main(Main.scala:59)
  at org.apache.spark.repl.Main.main(Main.scala)
  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:498)
  at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
  at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:955)
  at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
  at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
  at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
  at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1043)
  at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1052)
  at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.net.URISyntaxException: Illegal character in path at index 26: spark://localhost:57283/C:\classes
  at java.net.URI$Parser.fail(URI.java:2845)
  at java.net.URI$Parser.checkChars(URI.java:3018)
  at java.net.URI$Parser.parseHierarchical(URI.java:3102)
  at java.net.URI$Parser.parse(URI.java:3050)
  at java.net.URI.(URI.java:588)
  at org.apache.spark.repl.ExecutorClassLoader.(ExecutorClassLoader.scala:57)
  ... 67 more
22/05/13 11:13:52 ERROR Utils: Uncaught exception in thread main
java.lang.NullPointerException
  at org.apache.spark.scheduler.local.LocalSchedulerBackend.org$apache$spark$scheduler$local$LocalSchedulerBackend$$stop(LocalSchedulerBackend.scala:173)
  at org.apache.spark.scheduler.local.LocalSchedulerBackend.stop(LocalSchedulerBackend.scala:144)
  at org.apache.spark.scheduler.TaskSchedulerImpl.stop(TaskSchedulerImpl.scala:927)
  at org.apache.spark.scheduler.DAGScheduler.stop(DAGScheduler.scala:2567)
  at org.apache.spark.SparkContext.$anonfun$stop$12(SparkContext.scala:2086)
  at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1442)
  at org.apache.spark.SparkContext.stop(SparkContext.scala:2086)
  at org.apache.spark.SparkContext.(SparkContext.scala:677)
  at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2690)
  at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:949)
  at scala.Option.getOrElse(Option.scala:201)
  at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:943)
  at org.apache.spark.repl.Main$.createSparkSession(Main.scala:114)
  at $line3.$read$$iw.(:5)
  at $line3.$read.(:4)
  at $line3.$read$.()
  at $line3.$eval$.$print$lzycompute(:6)
  at $line3.$eval$.$print(:5)
  at $line3.$eval.$print()
  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:498)
  at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:670)
  at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1006)
  at scala.tools.nsc.interpreter.IMain.$anonfun$doInterpret$1(IMain.scala:506)
  at scala.reflect.internal.util.ScalaClassLoader.asContext(ScalaClassLoader.scala:36)
  at scala.reflect.internal.util.ScalaClassLoader.asContext$(ScalaClassLoader.scala:116)
  at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:43)
  at scala.tools.nsc.interpreter.IMain.loadAndRunReq$1(IMain.scala:505)
  at scala.tools.nsc.interpreter.IMain.$anonfun$doInterpret$3(IMain.scala:519)
  at scala.tools.nsc.interpreter.IMain.doInterpret(IMain.scala:519)
  at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:503)
  at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:501)
  at scala.tools.nsc.interpreter.IMain.$anonfun$quietRun$1(IMain.scala:216)
  at scala.tools.nsc.interpreter.shell.ReplReporterImpl.withoutPrintingResults(Reporter.scala:64)
  at scala.tools.nsc.interpreter.IMain.quietRun(IMain.scala:216)
  at scala.tools.nsc.interpreter.shell.ILoop.$anonfun$interpretPreamble$1(ILoop.scala:924)
  at scala.collection.immutable.List.foreach(List.scala:333)
  at scala.tools.nsc.interpreter.shell.ILoop.interpretPreamble(ILoop.scala:924)
  at scala.tools.nsc.interpreter.shell.ILoop.$anonfun$run$3(ILoop.scala:963)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
  at scala.tools.nsc.interpreter.shell.ILoop.echoOff(ILoop.scala:90)
  at scala.tools.nsc.interpreter.shell.ILoop.$anonfun$run$2(ILoop.scala:963)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
  at scala.tools.nsc.interpreter.IMain.withSuppressedSettings(IMain.scala:1406)
  at scala.tools.nsc.interpreter.shell.ILoop.$anonfun$run$1(ILoop.scala:954)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
  at scala.tools.nsc.interpreter.shell.ReplReporterImpl.withoutPrintingResults(Reporter.scala:64)
  at scala.tools.nsc.interpreter.shell.ILoop.run(ILoop.scala:954)
  at org.apache.spark.repl.Main$.doMain(Main.scala:84)
  at org.apache.spark.repl.Main$.main(Main.scala:59)
  at org.apache.spark.repl.Main.main(Main.scala)
  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:498)
  at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
  at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:955)
  at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
  at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
  at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
  at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1043)
  at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1052)
  at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
22/05/13 11:13:52 WARN MetricsSystem: Stopping a MetricsSystem that is not running
22/05/13 11:13:52 ERROR Main: Failed to initialize Spark session.
java.lang.reflect.InvocationTargetException
  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
  at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
  at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
  at org.apache.spark.executor.Executor.addReplClassLoaderIfNeeded(Executor.scala:909)
  at org.apache.spark.executor.Executor.(Executor.scala:160)
  at org.apache.spark.scheduler.local.LocalEndpoint.(LocalSchedulerBackend.scala:64)
  at org.apache.spark.scheduler.local.LocalSchedulerBackend.start(LocalSchedulerBackend.scala:132)
  at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:220)
  at org.apache.spark.SparkContext.(SparkContext.scala:581)
  at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2690)
  at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:949)
  at scala.Option.getOrElse(Option.scala:201)
  at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:943)
  at org.apache.spark.repl.Main$.createSparkSession(Main.scala:114)
  at $line3.$read$$iw.(:5)
  at $line3.$read.(:4)
  at $line3.$read$.()
  at $line3.$eval$.$print$lzycompute(:6)
  at $line3.$eval$.$print(:5)
  at $line3.$eval.$print()
  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:498)
  at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:670)
  at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1006)
  at scala.tools.nsc.interpreter.IMain.$anonfun$doInterpret$1(IMain.scala:506)
  at scala.reflect.internal.util.ScalaClassLoader.asContext(ScalaClassLoader.scala:36)
  at scala.reflect.internal.util.ScalaClassLoader.asContext$(ScalaClassLoader.scala:116)
  at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:43)
  at scala.tools.nsc.interpreter.IMain.loadAndRunReq$1(IMain.scala:505)
  at scala.tools.nsc.interpreter.IMain.$anonfun$doInterpret$3(IMain.scala:519)
  at scala.tools.nsc.interpreter.IMain.doInterpret(IMain.scala:519)
  at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:503)
  at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:501)
  at scala.tools.nsc.interpreter.IMain.$anonfun$quietRun$1(IMain.scala:216)
  at scala.tools.nsc.interpreter.shell.ReplReporterImpl.withoutPrintingResults(Reporter.scala:64)
  at scala.tools.nsc.interpreter.IMain.quietRun(IMain.scala:216)
  at scala.tools.nsc.interpreter.shell.ILoop.$anonfun$interpretPreamble$1(ILoop.scala:924)
  at scala.collection.immutable.List.foreach(List.scala:333)
  at scala.tools.nsc.interpreter.shell.ILoop.interpretPreamble(ILoop.scala:924)
  at scala.tools.nsc.interpreter.shell.ILoop.$anonfun$run$3(ILoop.scala:963)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
  at scala.tools.nsc.interpreter.shell.ILoop.echoOff(ILoop.scala:90)
  at scala.tools.nsc.interpreter.shell.ILoop.$anonfun$run$2(ILoop.scala:963)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
  at scala.tools.nsc.interpreter.IMain.withSuppressedSettings(IMain.scala:1406)
  at scala.tools.nsc.interpreter.shell.ILoop.$anonfun$run$1(ILoop.scala:954)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
  at scala.tools.nsc.interpreter.shell.ReplReporterImpl.withoutPrintingResults(Reporter.scala:64)
  at scala.tools.nsc.interpreter.shell.ILoop.run(ILoop.scala:954)
  at org.apache.spark.repl.Main$.doMain(Main.scala:84)
  at org.apache.spark.repl.Main$.main(Main.scala:59)
  at org.apache.spark.repl.Main.main(Main.scala)
  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:498)
  at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
  at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:955)
  at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
  at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
  at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
  at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1043)
  at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1052)
  at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.net.URISyntaxException: Illegal character in path at index 26: spark://localhost:57283/C:\classes
  at java.net.URI$Parser.fail(URI.java:2845)
  at java.net.URI$Parser.checkChars(URI.java:3018)
  at java.net.URI$Parser.parseHierarchical(URI.java:3102)
  at java.net.URI$Parser.parse(URI.java:3050)
  at java.net.URI.(URI.java:588)
  at org.apache.spark.repl.ExecutorClassLoader.(ExecutorClassLoader.scala:57)
  ... 67 more
22/05/13 11:13:52 ERROR Utils: Uncaught exception in thread shutdown-hook-0
java.lang.ExceptionInInitializerError
  at org.apache.spark.executor.Executor.stop(Executor.scala:333)
  at org.apache.spark.executor.Executor.$anonfun$stopHookReference$1(Executor.scala:76)
  at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:214)
  at org.apache.spark.util.SparkShutdownHookManager.$anonfun$runAll$2(ShutdownHookManager.scala:188)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
  at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2019)
  at org.apache.spark.util.SparkShutdownHookManager.$anonfun$runAll$1(ShutdownHookManager.scala:188)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
  at scala.util.Try$.apply(Try.scala:210)
  at org.apache.spark.util.SparkShutdownHookManager.runAll(ShutdownHookManager.scala:188)
  at org.apache.spark.util.SparkShutdownHookManager$$anon$2.run(ShutdownHookManager.scala:178)
  at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
  at java.util.concurrent.FutureTask.run(FutureTask.java:266)
  at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
  at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
  at java.lang.Thread.run(Thread.java:750)
Caused by: java.lang.NullPointerException
  at org.apache.spark.shuffle.ShuffleBlockPusher$.(ShuffleBlockPusher.scala:465)
  ... 16 more
22/05/13 11:13:52 WARN ShutdownHookManager: ShutdownHook '$anon$2' failed, java.util.concurrent.ExecutionException: java.lang.ExceptionInInitializerError
java.util.concurrent.ExecutionException: java.lang.ExceptionInInitializerError
  at java.util.concurrent.FutureTask.report(FutureTask.java:122)
  at java.util.concurrent.FutureTask.get(FutureTask.java:206)
  at org.apache.hadoop.util.ShutdownHookManager.executeShutdown(ShutdownHookManager.java:124)
  at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:95)
Caused by: java.lang.ExceptionInInitializerError
  at org.apache.spark.executor.Executor.stop(Executor.scala:333)
  at org.apache.spark.executor.Executor.$anonfun$stopHookReference$1(Executor.scala:76)
  at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:214)
  at org.apache.spark.util.SparkShutdownHookManager.$anonfun$runAll$2(ShutdownHookManager.scala:188)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
  at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2019)
  at org.apache.spark.util.SparkShutdownHookManager.$anonfun$runAll$1(ShutdownHookManager.scala:188)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
  at scala.util.Try$.apply(Try.scala:210)
  at org.apache.spark.util.SparkShutdownHookManager.runAll(ShutdownHookManager.scala:188)
  at org.apache.spark.util.SparkShutdownHookManager$$anon$2.run(ShutdownHookManager.scala:178)
  at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
  at java.util.concurrent.FutureTask.run(FutureTask.java:266)
  at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
  at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
  at java.lang.Thread.run(Thread.java:750)
Caused by: java.lang.NullPointerException
  at org.apache.spark.shuffle.ShuffleBlockPusher$.(ShuffleBlockPusher.scala:465)
  ... 16 more
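
What actually aborts the session is visible in the two "Caused by" lines above: the REPL's ExecutorClassLoader tries to parse spark://localhost:57283/C:\classes as a java.net.URI, and the embedded Windows path (C:\classes, with its drive colon and backslash) is not a legal URI path. The earlier NativeCodeLoader warning about the native Hadoop library is routine on Windows and is not the problem, and the later NullPointerException and ExceptionInInitializerError traces are just fallout from the SparkContext being torn down. This particular failure is tied to the spark-shell REPL (the log shows the Scala 2.13.5 build of Spark 3.2.1 on Windows); a compiled application that builds its own local SparkSession never goes through ExecutorClassLoader, so a small smoke test like the sketch below can confirm whether the installation itself is otherwise healthy. The object name and app name are illustrative, not taken from the original post:

    import org.apache.spark.sql.SparkSession

    // Hypothetical smoke test (not from the original post): creates a local
    // SparkSession directly instead of going through the spark-shell REPL,
    // which is where the ExecutorClassLoader URI failure above originates.
    object LocalSparkCheck {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .appName("local-spark-check") // illustrative name
          .master("local[*]")           // run driver and executors in this JVM
          .getOrCreate()

        // Trivial job: printing 100 here means the core installation works.
        println(spark.range(100).count())

        spark.stop()
      }
    }

If the goal is specifically to run spark-shell on Windows, workarounds that have been reported for this URISyntaxException include switching to the default Scala 2.12 prebuilt package of Spark, trying a different Spark release, or running the shell under WSL; these are commonly reported suggestions rather than a confirmed fix.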



Sources

This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.

Source: Stack Overflow
