Java example for Apache Spark: JavaRDD
Demonstrates the usage of JavaRDD with a use case involving the following steps:
- Create a JavaRDD of integers from a list.
- Filter out the numbers greater than 10.
- Transform the remaining numbers to their squares.
- Reduce the transformed RDD to the sum of its elements.
import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class JavaRDDDemo {
    public static void main(String[] args) {
        final SparkConf sparkConf = new SparkConf().setAppName("Java RDD Demo").setMaster("local");

        try (final JavaSparkContext jSC = new JavaSparkContext(sparkConf)) {
            // Create a Java RDD of type Integer from a list of integers
            final JavaRDD<Integer> intRDD = jSC.parallelize(
                    Arrays.asList(1, 2, 3, 4, 50, 61, 72, 8, 9, 19, 31, 42, 53, 6, 7, 23));

            // Create a new Java RDD by removing numbers greater than 10 from the integer RDD
            final JavaRDD<Integer> filteredRDD = intRDD.filter(x -> x <= 10);

            // Create a new transformed RDD by mapping each remaining number to its square
            final JavaRDD<Integer> transformedRDD = filteredRDD.map(x -> x * x);

            // Calculate the sum of all transformed integers.
            // reduce is an action (it returns a value), so it triggers actual execution
            // of the lazy filter and map transformations above.
            final int sumTransformed = transformedRDD.reduce((x, y) -> x + y);

            System.out.println(sumTransformed);
        }
    }
}
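As a minimal follow-up sketch (assuming the same local SparkContext jSC as in the example above), the same pipeline can also be written as a single chained expression. filter and map are lazy transformations, so no work happens until the reduce action runs; for the list above, the numbers kept are 1, 2, 3, 4, 8, 9, 6, 7, and the sum of their squares is 260.

            // Hypothetical compact variant of the pipeline above (reuses jSC from the example).
            // filter and map are lazy; reduce is the action that triggers execution.
            final int sum = jSC.parallelize(
                            Arrays.asList(1, 2, 3, 4, 50, 61, 72, 8, 9, 19, 31, 42, 53, 6, 7, 23))
                    .filter(x -> x <= 10)   // keep only numbers not greater than 10
                    .map(x -> x * x)        // square each remaining number
                    .reduce(Integer::sum);  // sum the squares; expected result: 260

            System.out.println(sum);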