
SecurityException when connecting to Spark master from Java

I'm new to Spark. In my project, I set the master URL and the app name via a SparkConf object.

Here is my code:

import org.apache.hadoop.conf.Configuration;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.bson.BSONObject;

import com.mongodb.hadoop.MongoInputFormat;
import com.mongodb.hadoop.MongoOutputFormat;

public class SparkExample {

    @SuppressWarnings("resource")
    public static void main(String[] args) {

        // Input: read documents from MongoDB through the mongo-hadoop connector
        Configuration conf = new Configuration();
        conf.set("mongo.job.input.format", "com.mongodb.hadoop.MongoInputFormat");
        conf.set("mongo.input.uri", "mongodb://192.168.1.149:27017/test.sns");

        JavaSparkContext sc = new JavaSparkContext(
                new SparkConf().setMaster("spark://localhost:7077").setAppName("vedat"));

        JavaPairRDD<Object, BSONObject> documents = sc.newAPIHadoopRDD(conf,
                MongoInputFormat.class, Object.class, BSONObject.class);

        // Output: write the documents to another MongoDB collection
        Configuration outConfig = new Configuration();
        outConfig.set("mongo.job.output.format", "com.mongodb.hadoop.MongoOutputFormat");
        outConfig.set("mongo.output.uri", "mongodb://192.168.1.149:27017/test.ou");

        documents.saveAsNewAPIHadoopFile("file://44", Object.class, BSONObject.class,
                MongoOutputFormat.class, outConfig);

        Configuration bsonConf = new Configuration();
        bsonConf.set("mongo.job.output.format", "com.mongodb.hadoop.BSONFileOutputFormat");

        // documents.saveAsNewAPIHadoopFile("hds://localhost:8020/user/spark/demo",
        //         Object.class, BSONObject.class, BSONFileOutputFormat.class, bsonConf);
    }
}

When I run the code, I get this stack trace:

ERROR spark.SparkContext: Error initializing SparkContext.
java.lang.SecurityException: class "javax.servlet.FilterRegistration"'s signer information does not match signer information of other classes in the same package
at java.lang.ClassLoader.checkCerts(ClassLoader.java:952)
at java.lang.ClassLoader.preDefineClass(ClassLoader.java:666)
at java.lang.ClassLoader.defineClass(ClassLoader.java:794)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:449)
at java.net.URLClassLoader.access$100(URLClassLoader.java:71)
at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:136)
at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:129)
at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:98)
at org.apache.spark.ui.JettyUtils$.createServletHandler(JettyUtils.scala:108)
at org.apache.spark.ui.JettyUtils$.createServletHandler(JettyUtils.scala:99)
at org.apache.spark.ui.WebUI.attachPage(WebUI.scala:78)
at org.apache.spark.ui.WebUI$$anonfun$attachTab$1.apply(WebUI.scala:62)
at org.apache.spark.ui.WebUI$$anonfun$attachTab$1.apply(WebUI.scala:62)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
at org.apache.spark.ui.WebUI.attachTab(WebUI.scala:62)
at org.apache.spark.ui.SparkUI.initialize(SparkUI.scala:61)
at org.apache.spark.ui.SparkUI.<init>(SparkUI.scala:74)
at org.apache.spark.ui.SparkUI$.create(SparkUI.scala:190)
at org.apache.spark.ui.SparkUI$.createLiveUI(SparkUI.scala:141)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:440)
at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
at tr.vedat.spark.SparkExample.main(SparkExample.java:24)
15/07/20 14:44:42 INFO spark.MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
15/07/20 14:44:42 ERROR spark.SparkContext: Error stopping SparkContext after init error.
java.lang.NullPointerException
at org.apache.spark.network.netty.NettyBlockTransferService.close(NettyBlockTransferService.scala:152)
at org.apache.spark.storage.BlockManager.stop(BlockManager.scala:1214)
at org.apache.spark.SparkEnv.stop(SparkEnv.scala:96)
at org.apache.spark.SparkContext.stop(SparkContext.scala:1657)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:565)
at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
at tr.vedat.spark.SparkExample.main(SparkExample.java:24)
Exception in thread "main" java.lang.SecurityException: class "javax.servlet.FilterRegistration"'s signer information does not match signer information of other classes in the same package
at java.lang.ClassLoader.checkCerts(ClassLoader.java:952)
at java.lang.ClassLoader.preDefineClass(ClassLoader.java:666)
at java.lang.ClassLoader.defineClass(ClassLoader.java:794)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:449)
at java.net.URLClassLoader.access$100(URLClassLoader.java:71)
at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:136)
at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:129)
at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:98)
at org.apache.spark.ui.JettyUtils$.createServletHandler(JettyUtils.scala:108)
at org.apache.spark.ui.JettyUtils$.createServletHandler(JettyUtils.scala:99)
at org.apache.spark.ui.WebUI.attachPage(WebUI.scala:78)
at org.apache.spark.ui.WebUI$$anonfun$attachTab$1.apply(WebUI.scala:62)
at org.apache.spark.ui.WebUI$$anonfun$attachTab$1.apply(WebUI.scala:62)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
at org.apache.spark.ui.WebUI.attachTab(WebUI.scala:62)
at org.apache.spark.ui.SparkUI.initialize(SparkUI.scala:61)
at org.apache.spark.ui.SparkUI.<init>(SparkUI.scala:74)
at org.apache.spark.ui.SparkUI$.create(SparkUI.scala:190)
at org.apache.spark.ui.SparkUI$.createLiveUI(SparkUI.scala:141)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:440)
at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
at tr.vedat.spark.SparkExample.main(SparkExample.java:24)

Here is my pom.xml:

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<groupId>tr.com.vedat</groupId>
<artifactId>aa</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>

<name>aa</name>
<url>http://maven.apache.org</url>

<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>

<dependencies>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>3.8.1</version>
        <scope>test</scope>
    </dependency>


    <dependency>
        <groupId>ant</groupId>
        <artifactId>ant</artifactId>
        <version>1.6.5</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.6.0</version>
        <exclusions>
            <exclusion>
                <groupId>org.eclipse.jetty</groupId>
                <artifactId>*</artifactId>
            </exclusion>
            <exclusion>
                <groupId>javax.servlet</groupId>
                <artifactId>*</artifactId>
            </exclusion>
        </exclusions>

    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.6.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.10</artifactId>
        <version>1.4.0</version>
        <exclusions>
            <exclusion>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-client</artifactId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-mapreduce-client-app</artifactId>
        <version>2.6.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-mllib_2.10</artifactId>
        <version>1.4.0</version>
    </dependency>

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-core</artifactId>
        <version>1.2.1</version>
    </dependency>
    <dependency>
        <groupId>commons-cli</groupId>
        <artifactId>commons-cli</artifactId>
        <version>1.2</version>
    </dependency>
    <dependency>
        <groupId>commons-configuration</groupId>
        <artifactId>commons-configuration</artifactId>
        <version>1.6</version>
    </dependency>
    <dependency>
        <groupId>commons-httpclient</groupId>
        <artifactId>commons-httpclient</artifactId>
        <version>3.0.1</version>
    </dependency>
    <dependency>
        <groupId>commons-lang</groupId>
        <artifactId>commons-lang</artifactId>
        <version>2.4</version>
    </dependency>
    <dependency>
        <groupId>commons-logging</groupId>
        <artifactId>commons-logging</artifactId>
        <version>1.1.1</version>
    </dependency>
    <dependency>
        <groupId>commons-logging</groupId>
        <artifactId>commons-logging-api</artifactId>
        <version>1.0.4</version>
    </dependency>
    <dependency>
        <groupId>org.codehaus.jackson</groupId>
        <artifactId>jackson-core-asl</artifactId>
        <version>1.8.8</version>
    </dependency>
    <dependency>
        <groupId>org.codehaus.jackson</groupId>
        <artifactId>jackson-mapper-asl</artifactId>
        <version>1.8.8</version>
    </dependency>

    <dependency>
        <groupId>org.mongodb</groupId>
        <artifactId>mongo-hadoop-core_1.0.0</artifactId>
        <version>1.0.0-rc0</version>
    </dependency>

    <dependency>
        <groupId>org.mongodb</groupId>
        <artifactId>mongo-java-driver</artifactId>
        <version>2.10.1</version>
    </dependency>

</dependencies>
</project>

Can anyone show me what I'm doing wrong? Why am I getting this error?


1 Answer

spark-core depends on org.eclipse.jetty.orbit:javax.servlet:3.0.0.v201112011016.

You should exclude it. I'm not sure it's the only problem, but it's at least part of it if you run the job inside another container.
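For illustration, here is a minimal sketch of what that exclusion could look like in the spark-core dependency of your pom.xml (the coordinates are the ones named above; your existing hadoop-client exclusion is kept):

    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.10</artifactId>
        <version>1.4.0</version>
        <exclusions>
            <exclusion>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-client</artifactId>
            </exclusion>
            <!-- Exclude the signed Jetty Orbit servlet-api jar whose signer
                 information clashes with other javax.servlet classes -->
            <exclusion>
                <groupId>org.eclipse.jetty.orbit</groupId>
                <artifactId>javax.servlet</artifactId>
            </exclusion>
        </exclusions>
    </dependency>

If the error persists, running mvn dependency:tree -Dincludes=javax.servlet should show which other artifacts still pull in servlet classes, so you can exclude those as well.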
