Spark and Cassandra Java application: Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/spark/sql/Dataset

I have a very simple Java application, copied almost verbatim from this example: http://markmail.org/download.xqy?id=zua6upabiylzeetp&number=2

All I want to do is read the table data and display it in the Eclipse console.

My pom.xml:

    <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
      <modelVersion>4.0.0</modelVersion>
      <groupId>chat_connaction_test</groupId>
      <artifactId>ChatSparkConnectionTest</artifactId>
      <version>0.0.1-SNAPSHOT</version>
      <dependencies>
        <dependency>
          <groupId>com.datastax.cassandra</groupId>
          <artifactId>cassandra-driver-core</artifactId>
          <version>3.1.0</version>
        </dependency>

        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-core_2.10</artifactId>
          <version>2.0.0</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/com.datastax.spark/spark-cassandra-connector_2.10 -->
        <dependency>
          <groupId>com.datastax.spark</groupId>
          <artifactId>spark-cassandra-connector_2.10</artifactId>
          <version>2.0.0-M3</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-streaming_2.10 -->
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-streaming_2.10</artifactId>
          <version>2.0.0</version>
        </dependency>
        <!--
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-hive_2.10</artifactId>
          <version>1.5.2</version>
        </dependency>
        -->
      </dependencies>
    </project>

And my Java code:

    package com.chatSparkConnactionTest;

    import static com.datastax.spark.connector.japi.CassandraJavaUtil.javaFunctions;

    import java.io.Serializable;

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.api.java.function.Function;

    import com.datastax.spark.connector.japi.CassandraRow;

    public class JavaDemo implements Serializable {
        private static final long serialVersionUID = 1L;

        public static void main(String[] args) {
            SparkConf conf = new SparkConf()
                .setAppName("chat")
                .setMaster("local")
                .set("spark.executor.memory", "1g")
                .set("spark.cassandra.connection.host", "127.0.0.1");
            JavaSparkContext sc = new JavaSparkContext(conf);

            JavaRDD<String> cassandraRowsRDD = javaFunctions(sc)
                .cassandraTable("chat", "dictionary")
                .map(new Function<CassandraRow, String>() {
                    @Override
                    public String call(CassandraRow cassandraRow) throws Exception {
                        String tempResult = cassandraRow.toString();
                        System.out.println(tempResult);
                        return tempResult;
                    }
                });

            System.out.println("Data as CassandraRows: \n"
                + cassandraRowsRDD.collect().size()); // THIS IS THE LINE WITH THE ERROR
        }
    }

And here is my error:

    16/10/05 20:49:18 INFO CassandraConnector: Connected to Cassandra cluster: Test Cluster
    Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/spark/sql/Dataset
        at java.lang.Class.getDeclaredMethods0(Native Method)
        at java.lang.Class.privateGetDeclaredMethods(Unknown Source)
        at java.lang.Class.getDeclaredMethod(Unknown Source)
        at java.io.ObjectStreamClass.getPrivateMethod(Unknown Source)
        at java.io.ObjectStreamClass.access00(Unknown Source)
        at java.io.ObjectStreamClass.run(Unknown Source)
        at java.io.ObjectStreamClass.run(Unknown Source)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.io.ObjectStreamClass.<init>(Unknown Source)
        at java.io.ObjectStreamClass.lookup(Unknown Source)
        at java.io.ObjectOutputStream.writeObject0(Unknown Source)
        at java.io.ObjectOutputStream.defaultWriteFields(Unknown Source)
        at java.io.ObjectOutputStream.writeSerialData(Unknown Source)
        at java.io.ObjectOutputStream.writeOrdinaryObject(Unknown Source)
        at java.io.ObjectOutputStream.writeObject0(Unknown Source)
        at java.io.ObjectOutputStream.defaultWriteFields(Unknown Source)
        at java.io.ObjectOutputStream.writeSerialData(Unknown Source)
        at java.io.ObjectOutputStream.writeOrdinaryObject(Unknown Source)
        at java.io.ObjectOutputStream.writeObject0(Unknown Source)
        at java.io.ObjectOutputStream.writeObject(Unknown Source)
        at scala.collection.immutable.$colon$colon.writeObject(List.scala:379)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
        at java.lang.reflect.Method.invoke(Unknown Source)
        at java.io.ObjectStreamClass.invokeWriteObject(Unknown Source)
        at java.io.ObjectOutputStream.writeSerialData(Unknown Source)
        at java.io.ObjectOutputStream.writeOrdinaryObject(Unknown Source)
        at java.io.ObjectOutputStream.writeObject0(Unknown Source)
        at java.io.ObjectOutputStream.defaultWriteFields(Unknown Source)
        at java.io.ObjectOutputStream.writeSerialData(Unknown Source)
        at java.io.ObjectOutputStream.writeOrdinaryObject(Unknown Source)
        at java.io.ObjectOutputStream.writeObject0(Unknown Source)
        at java.io.ObjectOutputStream.defaultWriteFields(Unknown Source)
        at java.io.ObjectOutputStream.writeSerialData(Unknown Source)
        at java.io.ObjectOutputStream.writeOrdinaryObject(Unknown Source)
        at java.io.ObjectOutputStream.writeObject0(Unknown Source)
        at java.io.ObjectOutputStream.defaultWriteFields(Unknown Source)
        at java.io.ObjectOutputStream.writeSerialData(Unknown Source)
        at java.io.ObjectOutputStream.writeOrdinaryObject(Unknown Source)
        at java.io.ObjectOutputStream.writeObject0(Unknown Source)
        at java.io.ObjectOutputStream.writeObject(Unknown Source)
        at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:43)
        at org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:100)
        at org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:295)
        at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:288)
        at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:108)
        at org.apache.spark.SparkContext.clean(SparkContext.scala:2037)
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:1896)
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:1911)
        at org.apache.spark.rdd.RDD$$anonfun$collect.apply(RDD.scala:893)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
        at org.apache.spark.rdd.RDD.withScope(RDD.scala:358)
        at org.apache.spark.rdd.RDD.collect(RDD.scala:892)
        at org.apache.spark.api.java.JavaRDDLike$class.collect(JavaRDDLike.scala:360)
        at org.apache.spark.api.java.AbstractJavaRDDLike.collect(JavaRDDLike.scala:45)
        at com.chatSparkConnactionTest.JavaDemo.main(JavaDemo.java:37)
    Caused by: java.lang.ClassNotFoundException: org.apache.spark.sql.Dataset
        at java.net.URLClassLoader.findClass(Unknown Source)
        at java.lang.ClassLoader.loadClass(Unknown Source)
        at sun.misc.Launcher$AppClassLoader.loadClass(Unknown Source)
        at java.lang.ClassLoader.loadClass(Unknown Source)
        ... 58 more

I have updated the pom.xml, but that didn't resolve the error. Can anyone help me solve this problem?

Thank you!

Update 1: Here is a screenshot of my build path: Link to my screenshot

I think you need to make sure the following resources are present on your classpath:

    cassandra-driver-core-2.1.0.jar
    metrics-core-3.0.2.jar
    slf4j-api-1.7.5.jar
    netty-3.9.0-Final.jar
    guava-16.0.1.jar

Hope this helps.

You are getting the "java.lang.NoClassDefFoundError: org/apache/spark/sql/Dataset" error because the "spark-sql" dependency is missing from your pom.xml file.

If you want to read a Cassandra table with Spark 2.0.0, then you need at least the dependencies below.

    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_2.11</artifactId>
      <version>2.0.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_2.11</artifactId>
      <version>2.0.0</version>
    </dependency>
    <dependency>
      <groupId>com.datastax.spark</groupId>
      <artifactId>spark-cassandra-connector_2.11</artifactId>
      <version>2.0.0-M3</version>
    </dependency>

Spark 2.0.0 provides the SparkSession and Dataset APIs. Below is a sample program that reads a Cassandra table and prints the records.

    import java.util.HashMap;

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;

    public class SparkCassandraDatasetApplication {
        public static void main(String[] args) {
            SparkSession spark = SparkSession
                .builder()
                .appName("SparkCassandraDatasetApplication")
                .config("spark.sql.warehouse.dir", "/file:C:/temp")
                .config("spark.cassandra.connection.host", "127.0.0.1")
                .config("spark.cassandra.connection.port", "9042")
                .master("local[2]")
                .getOrCreate();

            // Read data
            Dataset<Row> dataset = spark.read().format("org.apache.spark.sql.cassandra")
                .options(new HashMap<String, String>() {
                    {
                        put("keyspace", "mykeyspace");
                        put("table", "mytable");
                    }
                }).load();

            // Print data
            dataset.show();
            spark.stop();
        }
    }
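
Once the Dataset is loaded, the usual untyped DataFrame operations apply. As a rough sketch (the column names "id" and "username" are assumptions; replace them with the actual columns of your table), you could continue inside main() like this:

    // Continues inside main() above. Column names are hypothetical.
    Dataset<Row> selected = dataset
        .select("id", "username")
        .filter(org.apache.spark.sql.functions.col("id").isNotNull());
    selected.show(10);
    System.out.println("Row count: " + selected.count());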

If you still want to use RDDs, use the sample program below.

    import static com.datastax.spark.connector.japi.CassandraJavaUtil.javaFunctions;

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    import com.datastax.spark.connector.japi.CassandraJavaUtil;

    public class SparkCassandraRDDApplication {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf()
                .setAppName("SparkCassandraRDDApplication")
                .setMaster("local[2]")
                .set("spark.cassandra.connection.host", "127.0.0.1")
                .set("spark.cassandra.connection.port", "9042");

            JavaSparkContext sc = new JavaSparkContext(conf);

            // Read
            JavaRDD<UserData> resultsRDD = javaFunctions(sc)
                .cassandraTable("mykeyspace", "mytable", CassandraJavaUtil.mapRowTo(UserData.class));

            // Print
            resultsRDD.foreach(data -> {
                System.out.println(data.id);
                System.out.println(data.username);
            });

            sc.stop();
        }
    }

The JavaBean (UserData) used in the program above is shown below.

    import java.io.Serializable;

    public class UserData implements Serializable {
        String id;
        String username;

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }

        public String getUsername() {
            return username;
        }

        public void setUsername(String username) {
            this.username = username;
        }
    }
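
As a side note, the same bean also works with the Dataset API, since Spark can map rows onto it with a bean encoder. A small sketch under the same assumptions as above (keyspace "mykeyspace", table "mytable", a SparkSession named spark built as in the first example, and table columns matching the bean properties):

    // Sketch: read the table as a typed Dataset<UserData> via a bean encoder.
    // Requires the spark-sql dependency listed earlier.
    Dataset<UserData> users = spark.read()
        .format("org.apache.spark.sql.cassandra")
        .option("keyspace", "mykeyspace")
        .option("table", "mytable")
        .load()
        .as(org.apache.spark.sql.Encoders.bean(UserData.class));
    users.show();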

Remove

    <!-- https://mvnrepository.com/artifact/com.datastax.spark/spark-cassandra-connector-java_2.10 -->
    <dependency>
      <groupId>com.datastax.spark</groupId>
      <artifactId>spark-cassandra-connector-java_2.10</artifactId>
      <version>1.6.0-M1</version>
    </dependency>

You are mixing versions on your classpath. The Java module is included in the core module of Spark Cassandra Connector 2.0.0, so this dependency only pulls in Spark 1.6 references.
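
To double-check that the mixed versions are really gone after editing the pom, one quick sanity check (a plain-Java sketch, no Spark setup needed) is to ask the JVM which jar actually provides the missing class:

    // Minimal sketch: prints the jar that supplies org.apache.spark.sql.Dataset,
    // or fails with ClassNotFoundException if spark-sql is still absent.
    public class ClasspathCheck {
        public static void main(String[] args) throws Exception {
            Class<?> dataset = Class.forName("org.apache.spark.sql.Dataset");
            // getCodeSource() can be null for bootstrap classes, but not for an app-classpath jar
            System.out.println(dataset.getProtectionDomain().getCodeSource().getLocation());
        }
    }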