List of usage examples for java.util.Random.nextDouble()
public double nextDouble()
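Before the project-level examples, a minimal self-contained sketch of the method's contract (class name, seed, and range below are illustrative, not taken from any project on this page): nextDouble() returns the next pseudorandom, uniformly distributed double in [0.0, 1.0), so scaling by (max - min) and adding min maps it onto an arbitrary range.

import java.util.Random;

public class NextDoubleDemo {
    public static void main(String[] args) {
        Random random = new Random(42L); // fixed seed for reproducible output
        // uniformly distributed in [0.0, 1.0)
        double u = random.nextDouble();
        // map onto an arbitrary range [min, max)
        double min = -5.0, max = 5.0;
        double scaled = min + (max - min) * random.nextDouble();
        System.out.println(u + " " + scaled);
    }
}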
From source file:org.orekit.utils.TimeStampedFieldAngularCoordinatesTest.java
@Test
public void testRodriguesSymmetry()
        throws NoSuchMethodException, IllegalAccessException, InvocationTargetException {
    // use reflection to test the private static methods
    Method getter = TimeStampedFieldAngularCoordinates.class.getDeclaredMethod("getModifiedRodrigues",
            new Class<?>[] { TimeStampedFieldAngularCoordinates.class, double[].class, double.class });
    getter.setAccessible(true);
    Method factory = TimeStampedFieldAngularCoordinates.class.getDeclaredMethod("createFromModifiedRodrigues",
            new Class<?>[] { RealFieldElement[][].class, TimeStampedFieldAngularCoordinates.class });
    factory.setAccessible(true);

    // check the two-way conversion result in identity
    Random random = new Random(0xb1e615aaa8236b52L);
    double[] previous = new double[] { 1.0, 0.0, 0.0, 0.0 };
    for (int i = 0; i < 1000; ++i) {
        FieldRotation<DerivativeStructure> offsetRotation = randomRotation(random);
        FieldVector3D<DerivativeStructure> offsetRate = randomVector(random, 0.01);
        TimeStampedFieldAngularCoordinates<DerivativeStructure> offset =
                new TimeStampedFieldAngularCoordinates<DerivativeStructure>(
                        AbsoluteDate.J2000_EPOCH, offsetRotation, offsetRate, createVector(0, 0, 0, 4));
        FieldRotation<DerivativeStructure> rotation = randomRotation(random);
        FieldVector3D<DerivativeStructure> rotationRate = randomVector(random, 0.01);
        FieldVector3D<DerivativeStructure> rotationAcceleration = randomVector(random, 0.01);
        TimeStampedFieldAngularCoordinates<DerivativeStructure> ac =
                new TimeStampedFieldAngularCoordinates<DerivativeStructure>(
                        AbsoluteDate.J2000_EPOCH, rotation, rotationRate, rotationAcceleration);
        double dt = 10.0 * random.nextDouble();
        DerivativeStructure[][] rodrigues = (DerivativeStructure[][]) getter.invoke(null,
                ac.subtractOffset(offset.shiftedBy(dt)), previous, -0.9999);
        @SuppressWarnings("unchecked")
        TimeStampedFieldAngularCoordinates<DerivativeStructure> rebuilt =
                (TimeStampedFieldAngularCoordinates<DerivativeStructure>) factory.invoke(null,
                        rodrigues, offset.shiftedBy(dt));
        Assert.assertEquals(0.0, FieldRotation.distance(rotation, rebuilt.getRotation()).getReal(), 1.0e-14);
        Assert.assertEquals(0.0, FieldVector3D.distance(rotationRate, rebuilt.getRotationRate()).getReal(),
                1.0e-15);
        Assert.assertEquals(0.0,
                FieldVector3D.distance(rotationAcceleration, rebuilt.getRotationAcceleration()).getReal(),
                1.0e-15);
    }
}
From source file:com.gatf.generator.core.GatfTestGeneratorMojo.java
private Object getPrimitiveValue(Type claz) {
    if (isPrimitive(claz)) {
        if (claz.equals(boolean.class) || claz.equals(Boolean.class)) {
            Random rand = new Random();
            return rand.nextBoolean();
        } else if (claz.equals(Date.class)) {
            return new Date();
        } else if (claz.equals(Double.class) || claz.equals(double.class)) {
            Random rand = new Random(12345678L);
            return rand.nextDouble();
        } else if (claz.equals(Float.class) || claz.equals(float.class)) {
            Random rand = new Random(12345678L);
            return rand.nextFloat();
        } else if (claz.equals(String.class)) {
            return RandomStringUtils.randomAlphabetic(10);
        } else if (claz.equals(Long.class) || claz.equals(long.class) || claz.equals(Number.class)) {
            Random rand = new Random();
            return new Long(rand.nextInt(123));
        } else if (claz.equals(Integer.class) || claz.equals(int.class)) {
            Random rand = new Random();
            return new Integer(rand.nextInt(123));
        } else if (claz.equals(BigInteger.class)) {
            Random rand = new Random();
            return new BigInteger(new BigInteger("1234567890123456789").bitLength(), rand);
        } else if (claz.equals(BigDecimal.class)) {
            Random rand = new Random();
            return new BigDecimal(rand.nextInt(123));
        } else if (claz.equals(Short.class) || claz.equals(short.class)) {
            Random rand = new Random();
            return new Short((short) rand.nextInt(123));
        }
    }
    return null;
}
From source file:org.apache.tinkerpop.gremlin.structure.TransactionTest.java
@Test
@FeatureRequirement(featureClass = Graph.Features.GraphFeatures.class, feature = Graph.Features.GraphFeatures.FEATURE_TRANSACTIONS)
@FeatureRequirement(featureClass = Graph.Features.EdgeFeatures.class, feature = Graph.Features.EdgeFeatures.FEATURE_ADD_EDGES)
@FeatureRequirement(featureClass = Graph.Features.VertexFeatures.class, feature = Graph.Features.VertexFeatures.FEATURE_ADD_VERTICES)
@FeatureRequirement(featureClass = Graph.Features.VertexPropertyFeatures.class, feature = FEATURE_DOUBLE_VALUES)
@FeatureRequirement(featureClass = Graph.Features.VertexPropertyFeatures.class, feature = FEATURE_INTEGER_VALUES)
@FeatureRequirement(featureClass = Graph.Features.EdgePropertyFeatures.class, feature = EdgePropertyFeatures.FEATURE_FLOAT_VALUES)
@FeatureRequirement(featureClass = Graph.Features.EdgePropertyFeatures.class, feature = EdgePropertyFeatures.FEATURE_INTEGER_VALUES)
public void shouldExecuteWithCompetingThreads() {
    int totalThreads = 250;
    final AtomicInteger vertices = new AtomicInteger(0);
    final AtomicInteger edges = new AtomicInteger(0);
    final AtomicInteger completedThreads = new AtomicInteger(0);
    for (int i = 0; i < totalThreads; i++) {
        new Thread() {
            @Override
            public void run() {
                final Random random = new Random();
                if (random.nextBoolean()) {
                    final Vertex a = graph.addVertex();
                    final Vertex b = graph.addVertex();
                    final Edge e = a.addEdge("friend", b);

                    vertices.getAndAdd(2);
                    a.property(VertexProperty.Cardinality.single, "test", this.getId());
                    b.property(VertexProperty.Cardinality.single, "blah", random.nextDouble());
                    e.property("bloop", random.nextInt());
                    edges.getAndAdd(1);
                    graph.tx().commit();
                } else {
                    final Vertex a = graph.addVertex();
                    final Vertex b = graph.addVertex();
                    final Edge e = a.addEdge("friend", b);

                    a.property(VertexProperty.Cardinality.single, "test", this.getId());
                    b.property(VertexProperty.Cardinality.single, "blah", random.nextDouble());
                    e.property("bloop", random.nextInt());

                    if (random.nextBoolean()) {
                        graph.tx().commit();
                        vertices.getAndAdd(2);
                        edges.getAndAdd(1);
                    } else {
                        graph.tx().rollback();
                    }
                }
                completedThreads.getAndAdd(1);
            }
        }.start();
    }

    while (completedThreads.get() < totalThreads) {
    }

    assertEquals(completedThreads.get(), 250);
    assertVertexEdgeCounts(vertices.get(), edges.get());
}
From source file:jhplot.H2D.java
/**
 * Fill the histogram with random numbers from a flat distribution.
 * Seed is taken from time.
 * Using mean=0 and width=1 will give a flat distribution between 0 and 1.
 *
 * @param TotNumber number of generated events
 * @param meanX mean of the distribution in X
 * @param widthX width of the distribution in X
 * @param meanY mean of the distribution in Y
 * @param widthY width of the distribution in Y
 */
public void fillRnd(int TotNumber, double meanX, double widthX, double meanY, double widthY) {
    java.util.Random random = new java.util.Random();
    for (int i = 0; i < TotNumber; i++)
        h1.fill(widthX * random.nextDouble() + meanX, widthY * random.nextDouble() + meanY);
}
From source file:org.apache.jackrabbit.oak.plugins.index.lucene.LucenePropertyIndexTest.java
private static List<Double> createDoubles(int n) {
    Random rnd = new Random();
    List<Double> values = Lists.newArrayListWithCapacity(n);
    for (long i = 0; i < n; i++) {
        values.add(rnd.nextDouble());
    }
    Collections.shuffle(values);
    return values;
}
From source file:ml.shifu.shifu.core.dtrain.lr.LogisticRegressionWorker.java
protected float sampleWeights(float label) {
    float sampleWeights = 1f;
    // sample negative or kFoldCV, sample rate is 1d
    double sampleRate = (modelConfig.getTrain().getSampleNegOnly() || this.isKFoldCV) ? 1d
            : modelConfig.getTrain().getBaggingSampleRate();
    int classValue = (int) (label + 0.01f);
    if (!modelConfig.isBaggingWithReplacement()) {
        Random random = null;
        if (this.isStratifiedSampling) {
            random = baggingRandomMap.get(classValue);
            if (random == null) {
                random = DTrainUtils.generateRandomBySampleSeed(modelConfig.getTrain().getBaggingSampleSeed(),
                        CommonConstants.NOT_CONFIGURED_BAGGING_SEED);
                baggingRandomMap.put(classValue, random);
            }
        } else {
            random = baggingRandomMap.get(0);
            if (random == null) {
                random = DTrainUtils.generateRandomBySampleSeed(modelConfig.getTrain().getBaggingSampleSeed(),
                        CommonConstants.NOT_CONFIGURED_BAGGING_SEED);
                baggingRandomMap.put(0, random);
            }
        }
        if (random.nextDouble() <= sampleRate) {
            sampleWeights = 1f;
        } else {
            sampleWeights = 0f;
        }
    } else {
        // bagging with replacement sampling in training data set, take PoissonDistribution for sampling with
        // replacement
        if (this.isStratifiedSampling) {
            PoissonDistribution rng = this.baggingRngMap.get(classValue);
            if (rng == null) {
                rng = new PoissonDistribution(sampleRate);
                this.baggingRngMap.put(classValue, rng);
            }
            sampleWeights = rng.sample();
        } else {
            PoissonDistribution rng = this.baggingRngMap.get(0);
            if (rng == null) {
                rng = new PoissonDistribution(sampleRate);
                this.baggingRngMap.put(0, rng);
            }
            sampleWeights = rng.sample();
        }
    }
    return sampleWeights;
}
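The random.nextDouble() <= sampleRate comparison above is plain Bernoulli sampling: each record is kept independently with probability sampleRate. A minimal standalone sketch of the same pattern (class name and numbers are illustrative):

import java.util.Random;

public class BernoulliSampleDemo {
    public static void main(String[] args) {
        Random random = new Random(123L); // fixed seed so the run is reproducible
        double sampleRate = 0.3;          // keep roughly 30% of records
        int kept = 0, total = 100000;
        for (int i = 0; i < total; i++) {
            // each record is kept independently with probability sampleRate
            if (random.nextDouble() <= sampleRate) {
                kept++;
            }
        }
        System.out.println("kept " + kept + " of " + total); // ~30000 expected
    }
}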
From source file:edu.uc.rphash.tests.kmeanspp.KMeansPlusPlus.java
/**
 * Use K-means++ to choose the initial centers.
 *
 * @param <T> type of the points to cluster
 * @param points the points to choose the initial centers from
 * @param k the number of centers to choose
 * @param random random generator to use
 * @return the initial centers
 */
private static <T extends Clusterable<T>> List<Cluster<T>> chooseInitialCenters(final Collection<T> points,
        final int k, final Random random) {

    // Convert to list for indexed access. Make it unmodifiable, since removal of items
    // would screw up the logic of this method.
    final List<T> pointList = Collections.unmodifiableList(new ArrayList<T>(points));

    // The number of points in the list.
    final int numPoints = pointList.size();

    // Set the corresponding element in this array to indicate when
    // elements of pointList are no longer available.
    final boolean[] taken = new boolean[numPoints];

    // The resulting list of initial centers.
    final List<Cluster<T>> resultSet = new ArrayList<Cluster<T>>();

    // Choose one center uniformly at random from among the data points.
    final int firstPointIndex = random.nextInt(numPoints);
    final T firstPoint = pointList.get(firstPointIndex);
    resultSet.add(new Cluster<T>(firstPoint));

    // Must mark it as taken
    taken[firstPointIndex] = true;

    // To keep track of the minimum distance squared of elements of
    // pointList to elements of resultSet.
    final double[] minDistSquared = new double[numPoints];

    // Initialize the elements. Since the only point in resultSet is firstPoint,
    // this is very easy.
    for (int i = 0; i < numPoints; i++) {
        if (i != firstPointIndex) { // That point isn't considered
            double d = firstPoint.distanceFrom(pointList.get(i));
            minDistSquared[i] = d * d;
        }
    }

    while (resultSet.size() < k) {

        // Sum up the squared distances for the points in pointList not
        // already taken.
        double distSqSum = 0.0;
        for (int i = 0; i < numPoints; i++) {
            if (!taken[i]) {
                distSqSum += minDistSquared[i];
            }
        }

        // Add one new data point as a center. Each point x is chosen with
        // probability proportional to D(x)^2
        final double r = random.nextDouble() * distSqSum;

        // The index of the next point to be added to the resultSet.
        int nextPointIndex = -1;

        // Sum through the squared min distances again, stopping when
        // sum >= r.
        double sum = 0.0;
        for (int i = 0; i < numPoints; i++) {
            if (!taken[i]) {
                sum += minDistSquared[i];
                if (sum >= r) {
                    nextPointIndex = i;
                    break;
                }
            }
        }

        // If it's not set to >= 0, the point wasn't found in the previous
        // for loop, probably because distances are extremely small. Just pick
        // the last available point.
        if (nextPointIndex == -1) {
            for (int i = numPoints - 1; i >= 0; i--) {
                if (!taken[i]) {
                    nextPointIndex = i;
                    break;
                }
            }
        }

        // We found one.
        if (nextPointIndex >= 0) {

            final T p = pointList.get(nextPointIndex);
            resultSet.add(new Cluster<T>(p));

            // Mark it as taken.
            taken[nextPointIndex] = true;

            if (resultSet.size() < k) {
                // Now update elements of minDistSquared. We only have to compute
                // the distance to the new center to do this.
                for (int j = 0; j < numPoints; j++) {
                    // Only have to worry about the points still not taken.
                    if (!taken[j]) {
                        double d = p.distanceFrom(pointList.get(j));
                        double d2 = d * d;
                        if (d2 < minDistSquared[j]) {
                            minDistSquared[j] = d2;
                        }
                    }
                }
            }
        } else {
            // None found --
            // Break from the while loop to prevent
            // an infinite loop.
            break;
        }
    }

    return resultSet;
}
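The line random.nextDouble() * distSqSum followed by the cumulative scan is roulette-wheel (fitness-proportionate) selection. A compact standalone sketch of just that pattern (class and method names are illustrative):

import java.util.Random;

public class RouletteWheelDemo {

    // Picks an index with probability proportional to weights[i],
    // using the same cumulative-sum scan as the K-means++ code above.
    static int pickWeighted(double[] weights, Random random) {
        double total = 0.0;
        for (double w : weights) {
            total += w;
        }
        double r = random.nextDouble() * total; // uniform in [0, total)
        double sum = 0.0;
        for (int i = 0; i < weights.length; i++) {
            sum += weights[i];
            if (sum >= r) {
                return i;
            }
        }
        return weights.length - 1; // fallback for floating-point round-off
    }

    public static void main(String[] args) {
        double[] weights = { 1.0, 3.0, 6.0 }; // index 2 should win ~60% of draws
        Random random = new Random(7L);
        int[] counts = new int[weights.length];
        for (int i = 0; i < 10000; i++) {
            counts[pickWeighted(weights, random)]++;
        }
        System.out.println(counts[0] + " " + counts[1] + " " + counts[2]);
    }
}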
From source file:se.llbit.chunky.renderer.scene.PathTracer.java
/**
 * Path trace the ray in this scene
 * @param scene
 * @param state
 * @param addEmitted
 * @param first
 */
public static final boolean pathTrace(Scene scene, Ray ray, WorkerState state, int addEmitted, boolean first) {

    boolean hit = false;
    Random random = state.random;
    Vector3d ox = new Vector3d(ray.o);
    Vector3d od = new Vector3d(ray.d);
    double s = 0;

    while (true) {

        if (!RayTracer.nextIntersection(scene, ray, state)) {
            if (ray.getPrevMaterial() == Block.WATER) {
                ray.color.set(0, 0, 0, 1);
                hit = true;
            } else if (ray.depth == 0) {
                // direct sky hit
                if (!scene.transparentSky()) {
                    scene.sky.getSkyColorInterpolated(ray);
                    hit = true;
                }
            } else if (ray.specular) {
                // sky color
                scene.sky.getSkySpecularColor(ray);
                hit = true;
            } else {
                scene.sky.getSkyColor(ray);
                hit = true;
            }
            break;
        }

        double pSpecular = 0;

        Material currentMat = ray.getCurrentMaterial();
        Material prevMat = ray.getPrevMaterial();

        if (!scene.stillWater && ray.n.y != 0 &&
                ((currentMat == Block.WATER && prevMat == Block.AIR) ||
                 (currentMat == Block.AIR && prevMat == Block.WATER))) {

            WaterModel.doWaterDisplacement(ray);

            if (currentMat == Block.AIR) {
                ray.n.y = -ray.n.y;
            }
        }

        if (currentMat.isShiny) {
            if (currentMat == Block.WATER) {
                pSpecular = Scene.WATER_SPECULAR;
            } else {
                pSpecular = Scene.SPECULAR_COEFF;
            }
        }

        double pDiffuse = ray.color.w;

        float n1 = prevMat.ior;
        float n2 = currentMat.ior;

        if (pDiffuse + pSpecular < Ray.EPSILON && n1 == n2)
            continue;

        if (first) {
            s = ray.distance;
            first = false;
        }

        if (currentMat.isShiny && random.nextDouble() < pSpecular) {
            if (!scene.kill(ray.depth + 1, random)) {
                Ray reflected = new Ray();
                reflected.specularReflection(ray);
                if (pathTrace(scene, reflected, state, 1, false)) {
                    ray.color.x *= reflected.color.x;
                    ray.color.y *= reflected.color.y;
                    ray.color.z *= reflected.color.z;
                    hit = true;
                }
            }
        } else {
            if (random.nextDouble() < pDiffuse) {
                if (!scene.kill(ray.depth + 1, random)) {
                    Ray reflected = new Ray();
                    reflected.set(ray);
                    double emittance = 0;
                    if (scene.emittersEnabled && currentMat.isEmitter) {
                        emittance = addEmitted;
                        ray.emittance.x = ray.color.x * ray.color.x * currentMat.emittance * scene.emitterIntensity;
                        ray.emittance.y = ray.color.y * ray.color.y * currentMat.emittance * scene.emitterIntensity;
                        ray.emittance.z = ray.color.z * ray.color.z * currentMat.emittance * scene.emitterIntensity;
                        hit = true;
                    }

                    if (scene.sunEnabled) {
                        scene.sun.getRandomSunDirection(reflected, random);

                        double directLightR = 0;
                        double directLightG = 0;
                        double directLightB = 0;

                        boolean frontLight = reflected.d.dot(ray.n) > 0;

                        if (frontLight || (currentMat.subSurfaceScattering
                                && random.nextDouble() < Scene.fSubSurface)) {

                            if (!frontLight) {
                                reflected.o.scaleAdd(-Ray.OFFSET, ray.n);
                            }

                            reflected.setCurrentMat(reflected.getPrevMaterial(), reflected.getPrevData());

                            getDirectLightAttenuation(scene, reflected, state);

                            Vector4d attenuation = state.attenuation;
                            if (attenuation.w > 0) {
                                double mult = QuickMath.abs(reflected.d.dot(ray.n));
                                directLightR = attenuation.x * attenuation.w * mult;
                                directLightG = attenuation.y * attenuation.w * mult;
                                directLightB = attenuation.z * attenuation.w * mult;
                                hit = true;
                            }
                        }

                        reflected.diffuseReflection(ray, random);
                        hit = pathTrace(scene, reflected, state, 0, false) || hit;
                        if (hit) {
                            ray.color.x = ray.color.x
                                    * (emittance + directLightR * scene.sun.emittance.x
                                            + (reflected.color.x + reflected.emittance.x));
                            ray.color.y = ray.color.y
                                    * (emittance + directLightG * scene.sun.emittance.y
                                            + (reflected.color.y + reflected.emittance.y));
                            ray.color.z = ray.color.z
                                    * (emittance + directLightB * scene.sun.emittance.z
                                            + (reflected.color.z + reflected.emittance.z));
                        }

                    } else {
                        reflected.diffuseReflection(ray, random);

                        hit = pathTrace(scene, reflected, state, 0, false) || hit;
                        if (hit) {
                            ray.color.x = ray.color.x
                                    * (emittance + (reflected.color.x + reflected.emittance.x));
                            ray.color.y = ray.color.y
                                    * (emittance + (reflected.color.y + reflected.emittance.y));
                            ray.color.z = ray.color.z
                                    * (emittance + (reflected.color.z + reflected.emittance.z));
                        }
                    }
                }
            } else if (n1 != n2) {

                boolean doRefraction = currentMat == Block.WATER || prevMat == Block.WATER
                        || currentMat == Block.ICE || prevMat == Block.ICE;

                // refraction
                float n1n2 = n1 / n2;
                double cosTheta = -ray.n.dot(ray.d);
                double radicand = 1 - n1n2 * n1n2 * (1 - cosTheta * cosTheta);
                if (doRefraction && radicand < Ray.EPSILON) {
                    // total internal reflection
                    if (!scene.kill(ray.depth + 1, random)) {
                        Ray reflected = new Ray();
                        reflected.specularReflection(ray);
                        if (pathTrace(scene, reflected, state, 1, false)) {
                            ray.color.x = reflected.color.x;
                            ray.color.y = reflected.color.y;
                            ray.color.z = reflected.color.z;
                            hit = true;
                        }
                    }
                } else {
                    if (!scene.kill(ray.depth + 1, random)) {
                        Ray refracted = new Ray();
                        refracted.set(ray);

                        // Calculate angle-dependent reflectance using
                        // Fresnel equation approximation
                        // R(theta) = R0 + (1 - R0) * (1 - cos(theta))^5
                        float a = (n1n2 - 1);
                        float b = (n1n2 + 1);
                        double R0 = a * a / (b * b);
                        double c = 1 - cosTheta;
                        double Rtheta = R0 + (1 - R0) * c * c * c * c * c;

                        if (random.nextDouble() < Rtheta) {
                            Ray reflected = new Ray();
                            reflected.specularReflection(ray);
                            if (pathTrace(scene, reflected, state, 1, false)) {
                                ray.color.x = reflected.color.x;
                                ray.color.y = reflected.color.y;
                                ray.color.z = reflected.color.z;
                                hit = true;
                            }
                        } else {
                            if (doRefraction) {

                                double t2 = FastMath.sqrt(radicand);
                                if (cosTheta > 0) {
                                    refracted.d.x = n1n2 * ray.d.x + (n1n2 * cosTheta - t2) * ray.n.x;
                                    refracted.d.y = n1n2 * ray.d.y + (n1n2 * cosTheta - t2) * ray.n.y;
                                    refracted.d.z = n1n2 * ray.d.z + (n1n2 * cosTheta - t2) * ray.n.z;
                                } else {
                                    refracted.d.x = n1n2 * ray.d.x - (-n1n2 * cosTheta - t2) * ray.n.x;
                                    refracted.d.y = n1n2 * ray.d.y - (-n1n2 * cosTheta - t2) * ray.n.y;
                                    refracted.d.z = n1n2 * ray.d.z - (-n1n2 * cosTheta - t2) * ray.n.z;
                                }

                                refracted.d.normalize();

                                refracted.o.scaleAdd(Ray.OFFSET, refracted.d);
                            }

                            if (pathTrace(scene, refracted, state, 1, false)) {
                                ray.color.x = ray.color.x * pDiffuse + (1 - pDiffuse);
                                ray.color.y = ray.color.y * pDiffuse + (1 - pDiffuse);
                                ray.color.z = ray.color.z * pDiffuse + (1 - pDiffuse);
                                ray.color.x *= refracted.color.x;
                                ray.color.y *= refracted.color.y;
                                ray.color.z *= refracted.color.z;
                                hit = true;
                            }
                        }
                    }
                }

            } else {

                Ray transmitted = new Ray();
                transmitted.set(ray);
                transmitted.o.scaleAdd(Ray.OFFSET, transmitted.d);
                if (pathTrace(scene, transmitted, state, 1, false)) {
                    ray.color.x = ray.color.x * pDiffuse + (1 - pDiffuse);
                    ray.color.y = ray.color.y * pDiffuse + (1 - pDiffuse);
                    ray.color.z = ray.color.z * pDiffuse + (1 - pDiffuse);
                    ray.color.x *= transmitted.color.x;
                    ray.color.y *= transmitted.color.y;
                    ray.color.z *= transmitted.color.z;
                    hit = true;
                }
            }
        }

        if (hit && prevMat == Block.WATER) {
            // do water fog
            double a = ray.distance / scene.waterVisibility;
            double attenuation = 1 - QuickMath.min(1, a * a);
            ray.color.scale(attenuation);
            /*ray.color.x *= attenuation;
            ray.color.y *= attenuation;
            ray.color.z *= attenuation;
            float[] wc = Texture.water.getAvgColorLinear();
            ray.color.x += (1-attenuation) * wc[0];
            ray.color.y += (1-attenuation) * wc[1];
            ray.color.z += (1-attenuation) * wc[2];
            ray.color.w = attenuation;*/
        }

        break;
    }

    if (!hit) {
        ray.color.set(0, 0, 0, 1);
        if (first) {
            s = ray.distance;
        }
    }

    if (s > 0) {

        if (scene.atmosphereEnabled) {
            double Fex = scene.sun.extinction(s);
            ray.color.x *= Fex;
            ray.color.y *= Fex;
            ray.color.z *= Fex;

            if (!scene.volumetricFogEnabled) {
                double Fin = scene.sun.inscatter(Fex, scene.sun.theta(ray.d));
                ray.color.x += Fin * scene.sun.emittance.x * scene.sun.getIntensity();
                ray.color.y += Fin * scene.sun.emittance.y * scene.sun.getIntensity();
                ray.color.z += Fin * scene.sun.emittance.z * scene.sun.getIntensity();
            }
        }

        if (scene.volumetricFogEnabled) {
            s = (s - Ray.OFFSET) * random.nextDouble();

            Ray reflected = new Ray();
            reflected.o.scaleAdd(s, od, ox);
            scene.sun.getRandomSunDirection(reflected, random);
            reflected.setCurrentMat(Block.AIR, 0);

            getDirectLightAttenuation(scene, reflected, state);

            Vector4d attenuation = state.attenuation;
            double Fex = scene.sun.extinction(s);
            double Fin = scene.sun.inscatter(Fex, scene.sun.theta(ray.d));

            ray.color.x += 50 * attenuation.x * attenuation.w * Fin * scene.sun.emittance.x
                    * scene.sun.getIntensity();
            ray.color.y += 50 * attenuation.y * attenuation.w * Fin * scene.sun.emittance.y
                    * scene.sun.getIntensity();
            ray.color.z += 50 * attenuation.z * attenuation.w * Fin * scene.sun.emittance.z
                    * scene.sun.getIntensity();
        }
    }

    return hit;
}
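The path tracer uses nextDouble() for Monte Carlo decisions throughout: choosing specular versus diffuse reflection, accepting a Fresnel reflection with probability Rtheta, and picking a random fog-sample distance along the ray. The same Monte Carlo idea in a self-contained form, estimating pi from uniform points in the unit square (class name and sample count are illustrative):

import java.util.Random;

public class MonteCarloPiDemo {
    public static void main(String[] args) {
        Random random = new Random(2016L); // fixed seed for a reproducible estimate
        int inside = 0, samples = 1000000;
        for (int i = 0; i < samples; i++) {
            // uniform point in the unit square [0,1) x [0,1)
            double x = random.nextDouble();
            double y = random.nextDouble();
            if (x * x + y * y < 1.0) {
                inside++; // point falls inside the quarter circle
            }
        }
        // quarter-circle area / square area = pi / 4
        System.out.println("pi ~ " + 4.0 * inside / samples);
    }
}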
From source file:org.apache.hadoop.mapred.TestSequenceFileAsBinaryOutputFormat.java
public void testBinary() throws IOException {
    JobConf job = new JobConf();
    FileSystem fs = FileSystem.getLocal(job);
    Path dir = new Path(new Path(new Path(System.getProperty("test.build.data", ".")),
            FileOutputCommitter.TEMP_DIR_NAME), "_" + attempt);
    Path file = new Path(dir, "testbinary.seq");
    Random r = new Random();
    long seed = r.nextLong();
    r.setSeed(seed);

    fs.delete(dir, true);
    if (!fs.mkdirs(dir)) {
        fail("Failed to create output directory");
    }
    job.set("mapred.task.id", attempt);

    FileOutputFormat.setOutputPath(job, dir.getParent().getParent());
    FileOutputFormat.setWorkOutputPath(job, dir);

    SequenceFileAsBinaryOutputFormat.setSequenceFileOutputKeyClass(job, IntWritable.class);
    SequenceFileAsBinaryOutputFormat.setSequenceFileOutputValueClass(job, DoubleWritable.class);

    SequenceFileAsBinaryOutputFormat.setCompressOutput(job, true);
    SequenceFileAsBinaryOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);

    BytesWritable bkey = new BytesWritable();
    BytesWritable bval = new BytesWritable();

    RecordWriter<BytesWritable, BytesWritable> writer = new SequenceFileAsBinaryOutputFormat()
            .getRecordWriter(fs, job, file.toString(), Reporter.NULL);

    IntWritable iwritable = new IntWritable();
    DoubleWritable dwritable = new DoubleWritable();
    DataOutputBuffer outbuf = new DataOutputBuffer();
    LOG.info("Creating data by SequenceFileAsBinaryOutputFormat");
    try {
        for (int i = 0; i < RECORDS; ++i) {
            iwritable = new IntWritable(r.nextInt());
            iwritable.write(outbuf);
            bkey.set(outbuf.getData(), 0, outbuf.getLength());
            outbuf.reset();
            dwritable = new DoubleWritable(r.nextDouble());
            dwritable.write(outbuf);
            bval.set(outbuf.getData(), 0, outbuf.getLength());
            outbuf.reset();
            writer.write(bkey, bval);
        }
    } finally {
        writer.close(Reporter.NULL);
    }

    InputFormat<IntWritable, DoubleWritable> iformat = new SequenceFileInputFormat<IntWritable, DoubleWritable>();
    int count = 0;
    r.setSeed(seed);
    DataInputBuffer buf = new DataInputBuffer();
    final int NUM_SPLITS = 3;
    SequenceFileInputFormat.addInputPath(job, file);
    LOG.info("Reading data by SequenceFileInputFormat");
    for (InputSplit split : iformat.getSplits(job, NUM_SPLITS)) {
        RecordReader<IntWritable, DoubleWritable> reader = iformat.getRecordReader(split, job, Reporter.NULL);
        try {
            int sourceInt;
            double sourceDouble;
            while (reader.next(iwritable, dwritable)) {
                sourceInt = r.nextInt();
                sourceDouble = r.nextDouble();
                assertEquals("Keys don't match: " + "*" + iwritable.get() + ":" + sourceInt + "*",
                        sourceInt, iwritable.get());
                assertTrue("Vals don't match: " + "*" + dwritable.get() + ":" + sourceDouble + "*",
                        Double.compare(dwritable.get(), sourceDouble) == 0);
                ++count;
            }
        } finally {
            reader.close();
        }
    }
    assertEquals("Some records not found", RECORDS, count);
}
From source file:org.apache.hadoop.mapreduce.lib.output.TestMRSequenceFileAsBinaryOutputFormat.java
public void testBinary() throws IOException, InterruptedException {
    Configuration conf = new Configuration();
    Job job = new Job(conf);

    Path outdir = new Path(System.getProperty("test.build.data", "/tmp"), "outseq");
    Random r = new Random();
    long seed = r.nextLong();
    r.setSeed(seed);

    FileOutputFormat.setOutputPath(job, outdir);

    SequenceFileAsBinaryOutputFormat.setSequenceFileOutputKeyClass(job, IntWritable.class);
    SequenceFileAsBinaryOutputFormat.setSequenceFileOutputValueClass(job, DoubleWritable.class);

    SequenceFileAsBinaryOutputFormat.setCompressOutput(job, true);
    SequenceFileAsBinaryOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);

    BytesWritable bkey = new BytesWritable();
    BytesWritable bval = new BytesWritable();

    TaskAttemptContext context = MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
    OutputFormat<BytesWritable, BytesWritable> outputFormat = new SequenceFileAsBinaryOutputFormat();
    OutputCommitter committer = outputFormat.getOutputCommitter(context);
    committer.setupJob(job);
    RecordWriter<BytesWritable, BytesWritable> writer = outputFormat.getRecordWriter(context);

    IntWritable iwritable = new IntWritable();
    DoubleWritable dwritable = new DoubleWritable();
    DataOutputBuffer outbuf = new DataOutputBuffer();
    LOG.info("Creating data by SequenceFileAsBinaryOutputFormat");
    try {
        for (int i = 0; i < RECORDS; ++i) {
            iwritable = new IntWritable(r.nextInt());
            iwritable.write(outbuf);
            bkey.set(outbuf.getData(), 0, outbuf.getLength());
            outbuf.reset();
            dwritable = new DoubleWritable(r.nextDouble());
            dwritable.write(outbuf);
            bval.set(outbuf.getData(), 0, outbuf.getLength());
            outbuf.reset();
            writer.write(bkey, bval);
        }
    } finally {
        writer.close(context);
    }
    committer.commitTask(context);
    committer.commitJob(job);

    InputFormat<IntWritable, DoubleWritable> iformat = new SequenceFileInputFormat<IntWritable, DoubleWritable>();
    int count = 0;
    r.setSeed(seed);
    SequenceFileInputFormat.setInputPaths(job, outdir);
    LOG.info("Reading data by SequenceFileInputFormat");
    for (InputSplit split : iformat.getSplits(job)) {
        RecordReader<IntWritable, DoubleWritable> reader = iformat.createRecordReader(split, context);
        MapContext<IntWritable, DoubleWritable, BytesWritable, BytesWritable> mcontext =
                new MapContext<IntWritable, DoubleWritable, BytesWritable, BytesWritable>(
                        job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
                        MapReduceTestUtil.createDummyReporter(), split);
        reader.initialize(split, mcontext);
        try {
            int sourceInt;
            double sourceDouble;
            while (reader.nextKeyValue()) {
                sourceInt = r.nextInt();
                sourceDouble = r.nextDouble();
                iwritable = reader.getCurrentKey();
                dwritable = reader.getCurrentValue();
                assertEquals("Keys don't match: " + "*" + iwritable.get() + ":" + sourceInt + "*",
                        sourceInt, iwritable.get());
                assertTrue("Vals don't match: " + "*" + dwritable.get() + ":" + sourceDouble + "*",
                        Double.compare(dwritable.get(), sourceDouble) == 0);
                ++count;
            }
        } finally {
            reader.close();
        }
    }
    assertEquals("Some records not found", RECORDS, count);
}