List of usage examples for java.util.stream IntStream range
public static IntStream range(int startInclusive, int endExclusive)
From source file:com.wrmsr.wava.basic.BasicLoopInfo.java
/**
 * Derives the immediate parent of every loop from a full containment relation.
 *
 * <p>{@code loopContents} maps each loop header to the set of names contained in that loop.
 * The algorithm proceeds in phases:
 * <ol>
 *   <li>Build {@code map}: for each loop, the set of its ancestors — {@code ENTRY_NAME} plus
 *       every other loop whose contents include it.</li>
 *   <li>{@code loopDepths}: a loop's nesting depth is simply the size of its ancestor set
 *       ({@code ENTRY_NAME} is pinned at depth 0).</li>
 *   <li>{@code depthLoopsLists}: bucket loops by depth.</li>
 *   <li>Walk depths 1..max: a loop's parent is the single ancestor not yet in {@code seen};
 *       since {@code seen} accumulates all names of depth &lt;= current-2 (plus ENTRY_NAME),
 *       the one remaining ancestor is exactly the depth-1 enclosing loop. The
 *       {@code checkState} asserts that invariant.</li>
 * </ol>
 *
 * <p>The final {@code checkState} verifies every loop was assigned a parent.
 * NOTE(review): relies on {@code getOnlyElement} throwing if the ancestor sets are malformed
 * (i.e. the containment relation is not a proper tree) — presumably intentional; confirm.
 */
public static Map<Name, Name> getLoopParents(SetMultimap<Name, Name> loopContents) { Map<Name, Name> loopParents = new HashMap<>(); Map<Name, Set<Name>> map = loopContents.keySet().stream() .collect(toHashMap(identity(), loop -> new HashSet<>())); for (Name cur : loopContents.keySet()) { map.get(cur).add(ENTRY_NAME);/* w ww . java 2 s. c o m*/ Set<Name> children = loopContents.get(cur); for (Name child : children) { if (!cur.equals(child) && loopContents.containsKey(child)) { map.get(child).add(cur); } } } Map<Name, Integer> loopDepths = map.entrySet().stream() .collect(toHashMap(entry -> entry.getKey(), entry -> entry.getValue().size())); loopDepths.put(ENTRY_NAME, 0); int maxDepth = loopDepths.values().stream().mapToInt(Integer::intValue).max().orElse(0); List<List<Name>> depthLoopsLists = IntStream.range(0, maxDepth + 1).boxed() .<List<Name>>map(i -> new ArrayList<>()).collect(toArrayList()); loopDepths.forEach((loop, depth) -> depthLoopsLists.get(depth).add(loop)); Set<Name> seen = new HashSet<>(); for (int depth = 1; depth < depthLoopsLists.size(); ++depth) { for (Name loop : depthLoopsLists.get(depth)) { Name parent = getOnlyElement(Sets.difference(map.get(loop), seen)); checkState(loopDepths.get(parent) == depth - 1); loopParents.put(loop, parent); } seen.addAll(depthLoopsLists.get(depth - 1)); } checkState(loopContents.keySet().equals(loopParents.keySet())); return loopParents; }
From source file:de.bund.bfr.math.MathUtils.java
public static double[][] aproxJacobianParallel(List<? extends MultivariateVectorFunction> functions, double[] point, int nResult) { double[][] result = new double[nResult][functions.size()]; IntStream.range(0, functions.size()).parallel().forEach(i -> { double[] p = point.clone(); p[i] = point[i] - DERIV_EPSILON; double[] result1 = functions.get(i).value(p); p[i] = point[i] + DERIV_EPSILON; double[] result2 = functions.get(i).value(p); for (int j = 0; j < nResult; j++) { result[j][i] = (result2[j] - result1[j]) / (2 * DERIV_EPSILON); }// w w w . ja v a 2 s. c o m }); return result; }
From source file:com.github.horrorho.inflatabledonkey.chunk.engine.standard.StandardChunkEngine.java
Optional<Map<ChunkServer.ChunkReference, Chunk>> chunks(int container, List<Optional<Chunk>> chunkList) { if (chunkList.contains(Optional.<Chunk>empty())) { return Optional.empty(); }//w w w .j av a 2 s. co m Map<ChunkServer.ChunkReference, Chunk> chunks = IntStream.range(0, chunkList.size()) .filter(i -> chunkList.get(i).isPresent()).mapToObj(Integer::valueOf).collect(Collectors .toMap(i -> ChunkReferences.chunkReference(container, i), i -> chunkList.get(i).get())); return Optional.of(chunks); }
From source file:edu.cmu.lti.oaqa.baseqa.providers.ml.classifiers.LibSvmProvider.java
@Override public String predict(Map<String, Double> features) { svm_node[] x = IntStream.range(1, fid2feat.size() + 1).mapToObj(j -> { svm_node node = new svm_node(); node.index = j;// w w w . j a v a 2 s . c om node.value = features.getOrDefault(fid2feat.get(j), 0.0); return node; }).toArray(svm_node[]::new); double result = svm.svm_predict(model, x); return lid2label.get((int) result); }
From source file:org.gradoop.flink.model.impl.operators.matching.single.cypher.common.pojos.EmbeddingTestUtils.java
/**
 * Converts the given embedding to a flat list of identifiers by concatenating the id lists of
 * each of its entries, in order.
 *
 * @param embedding embedding to flatten
 * @return list of all ids across the embedding's entries
 */
public static List<GradoopId> embeddingToIdList(Embedding embedding) {
    List<GradoopId> ids = new ArrayList<>();
    for (int entry = 0; entry < embedding.size(); entry++) {
        ids.addAll(embedding.getIdAsList(entry));
    }
    return ids;
}
From source file:com.khartec.waltz.jobs.sample.InvolvementGenerator.java
/**
 * Produces a random number (1..upperBound) of involvement records linking the given application
 * to randomly picked employees, tagged with the "RANDOM_GENERATOR" provenance marker.
 *
 * @param appRef      application the involvements refer to
 * @param employeeIds pool of employee ids to pick from
 * @param kind        kind of involvement to record
 * @param upperBound  inclusive upper bound on the number of records generated
 * @return a stream of freshly built involvement records
 */
private static Stream<InvolvementRecord> mkInvolvments(EntityReference appRef, List<String> employeeIds,
        InvolvementKind kind, int upperBound) {
    // nextInt(upperBound) yields 0..upperBound-1, so at least one record is always produced.
    int howMany = 1 + rnd.nextInt(upperBound);
    return IntStream
            .range(0, howMany)
            .mapToObj(ignored -> new InvolvementRecord(
                    appRef.kind().name(),
                    appRef.id(),
                    kind.name(),
                    randomPick(employeeIds),
                    "RANDOM_GENERATOR"));
}
From source file:com.epam.ta.reportportal.core.log.impl.GetLogHandlerTest.java
private List<Log> generateLogs(LogRepository logRepository) { return IntStream.range(0, ITEMS_COUNT).mapToObj(i -> { Log log = new Log(); log.setLevel(LogLevel.ERROR);//from ww w .j av a 2 s .c om log.setLogMsg(RandomStringUtils.random(10)); log.setTestItemRef(ITEM_ID); final Date now = Calendar.getInstance().getTime(); log.setLastModified(now); log.setLogTime(now); logRepository.save(log); return log; }).collect(Collectors.toList()); }
From source file:org.apache.hadoop.hbase.client.TestAsyncGetMultiThread.java
/**
 * Stress test: 20 worker threads continuously run async gets (via {@code run(stop)}) while the
 * main thread churns the cluster — for each split key it splits the table, major-compacts every
 * region, triggers the balancer, and moves the meta region to a different region server, with
 * 5s pauses between disruptions. Finally signals the workers to stop and joins each
 * {@code Future}, so any exception a worker hit is rethrown here and fails the test.
 * NOTE(review): the fixed Thread.sleep(5000) waits presumably give region transitions time to
 * settle rather than guaranteeing completion — timing-sensitive by design.
 */
@Test public void test() throws IOException, InterruptedException, ExecutionException { int numThreads = 20; AtomicBoolean stop = new AtomicBoolean(false); ExecutorService executor = Executors.newFixedThreadPool(numThreads, Threads.newDaemonThreadFactory("TestAsyncGet-")); List<Future<?>> futures = new ArrayList<>(); IntStream.range(0, numThreads).forEach(i -> futures.add(executor.submit(() -> { run(stop);/* www . j a va2s .co m*/ return null; }))); Collections.shuffle(Arrays.asList(SPLIT_KEYS), new Random(123)); Admin admin = TEST_UTIL.getAdmin(); for (byte[] splitPoint : SPLIT_KEYS) { admin.split(TABLE_NAME, splitPoint); for (HRegion region : TEST_UTIL.getHBaseCluster().getRegions(TABLE_NAME)) { region.compact(true); } Thread.sleep(5000); admin.balancer(true); Thread.sleep(5000); ServerName metaServer = TEST_UTIL.getHBaseCluster().getServerHoldingMeta(); ServerName newMetaServer = TEST_UTIL.getHBaseCluster().getRegionServerThreads().stream() .map(t -> t.getRegionServer().getServerName()).filter(s -> !s.equals(metaServer)).findAny() .get(); admin.move(HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes(), Bytes.toBytes(newMetaServer.getServerName())); Thread.sleep(5000); } stop.set(true); executor.shutdown(); for (Future<?> future : futures) { future.get(); } }
From source file:org.apache.hadoop.hive.ql.io.orc.NiFiOrcUtils.java
public static Object convertToORCObject(TypeInfo typeInfo, Object o) { if (o != null) { if (typeInfo instanceof UnionTypeInfo) { OrcUnion union = new OrcUnion(); // Need to find which of the union types correspond to the primitive object TypeInfo objectTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector( ObjectInspectorFactory.getReflectionObjectInspector(o.getClass(), ObjectInspectorFactory.ObjectInspectorOptions.JAVA)); List<TypeInfo> unionTypeInfos = ((UnionTypeInfo) typeInfo).getAllUnionObjectTypeInfos(); int index = 0; while (index < unionTypeInfos.size() && !unionTypeInfos.get(index).equals(objectTypeInfo)) { index++;/*from w ww. j a v a2 s . c o m*/ } if (index < unionTypeInfos.size()) { union.set((byte) index, convertToORCObject(objectTypeInfo, o)); } else { throw new IllegalArgumentException( "Object Type for class " + o.getClass().getName() + " not in Union declaration"); } return union; } if (o instanceof Integer) { return new IntWritable((int) o); } if (o instanceof Boolean) { return new BooleanWritable((boolean) o); } if (o instanceof Long) { return new LongWritable((long) o); } if (o instanceof Float) { return new FloatWritable((float) o); } if (o instanceof Double) { return new DoubleWritable((double) o); } if (o instanceof String || o instanceof Utf8 || o instanceof GenericData.EnumSymbol) { return new Text(o.toString()); } if (o instanceof ByteBuffer) { return new BytesWritable(((ByteBuffer) o).array()); } if (o instanceof int[]) { int[] intArray = (int[]) o; return Arrays.stream(intArray).mapToObj( (element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("int"), element)) .collect(Collectors.toList()); } if (o instanceof long[]) { long[] longArray = (long[]) o; return Arrays.stream(longArray).mapToObj( (element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("bigint"), element)) .collect(Collectors.toList()); } if (o instanceof float[]) { float[] floatArray = (float[]) o; return IntStream.range(0, 
floatArray.length).mapToDouble(i -> floatArray[i]) .mapToObj((element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("float"), (float) element)) .collect(Collectors.toList()); } if (o instanceof double[]) { double[] doubleArray = (double[]) o; return Arrays.stream(doubleArray).mapToObj( (element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("double"), element)) .collect(Collectors.toList()); } if (o instanceof boolean[]) { boolean[] booleanArray = (boolean[]) o; return IntStream.range(0, booleanArray.length).map(i -> booleanArray[i] ? 1 : 0) .mapToObj((element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("boolean"), element == 1)) .collect(Collectors.toList()); } if (o instanceof GenericData.Array) { GenericData.Array array = ((GenericData.Array) o); // The type information in this case is interpreted as a List TypeInfo listTypeInfo = ((ListTypeInfo) typeInfo).getListElementTypeInfo(); return array.stream().map((element) -> convertToORCObject(listTypeInfo, element)) .collect(Collectors.toList()); } if (o instanceof List) { return o; } if (o instanceof Map) { MapWritable mapWritable = new MapWritable(); TypeInfo keyInfo = ((MapTypeInfo) typeInfo).getMapKeyTypeInfo(); TypeInfo valueInfo = ((MapTypeInfo) typeInfo).getMapKeyTypeInfo(); // Unions are not allowed as key/value types, so if we convert the key and value objects, // they should return Writable objects ((Map) o).forEach((key, value) -> { Object keyObject = convertToORCObject(keyInfo, key); Object valueObject = convertToORCObject(valueInfo, value); if (keyObject == null || !(keyObject instanceof Writable) || !(valueObject instanceof Writable)) { throw new IllegalArgumentException( "Maps may only contain Writable types, and the key cannot be null"); } mapWritable.put((Writable) keyObject, (Writable) valueObject); }); return mapWritable; } if (o instanceof GenericData.Record) { GenericData.Record record = (GenericData.Record) o; TypeInfo recordSchema = 
NiFiOrcUtils.getOrcField(record.getSchema()); List<Schema.Field> recordFields = record.getSchema().getFields(); if (recordFields != null) { Object[] fieldObjects = new Object[recordFields.size()]; for (int i = 0; i < recordFields.size(); i++) { Schema.Field field = recordFields.get(i); Schema fieldSchema = field.schema(); Object fieldObject = record.get(field.name()); fieldObjects[i] = NiFiOrcUtils.convertToORCObject(NiFiOrcUtils.getOrcField(fieldSchema), fieldObject); } return NiFiOrcUtils.createOrcStruct(recordSchema, fieldObjects); } } throw new IllegalArgumentException("Error converting object of type " + o.getClass().getName() + " to ORC type " + typeInfo.getTypeName()); } else { return null; } }
From source file:org.magicdgs.popgenlib.diversity.NucleotideDiversity.java
/** * Gets the denominator of Watterson's θ * (formula 3 in <a href="http://www.genetics.org/content/123/3/585">Tajima (1989)</a>), which * is the {@code numberOfSamples}<SUP>th</SUP> -1 Harmonic Number. * * This method returns the exact value for up to 49 samples. Otherwise, it uses the * EulerMascheroni constant (γ) and an approximation for the digamma distribution * (ψ) for computing the n<SUP>th</SUP> Harmonic Number: * * <p>γ + ψ({@code numberOfSamples}) * * @param numberOfSamples number of samples to compute the denominator. * * @throws IllegalArgumentException if the number of samples is lower than 2. * @see Gamma#digamma(double)/*from w ww.j av a 2 s . c o m*/ */ public static double wattersonsDenominatorApproximation(final int numberOfSamples) { // Defined as sum(1/j) for j=1 to j=n-1; where n is the number of samples // This is actually the (n-1)th Harmonic number (https://en.wikipedia.org/wiki/Harmonic_number) // This could be more efficiently computed using the formula implying the digamma distribution, // which is implemented in an approximated way in commons-math3 (good enough for our purposes). // In common-math3 they only use the fast algorithm if n is >= 49 // thus, here the limit for switching to the fast algorithm is 49 too, // because it will use the same number of iterations if not if (numberOfSamples < 49) { return IntStream.range(1, numberOfSamples).mapToDouble(i -> 1d / i).sum(); } // The formula of the nth Harmonic number using the digamma function is defined as: // gamma + psi(n + 1); where gamma is the Euler-Mascheroni constant and psi the digamma function // because here we want the number numberOfSamples-1 Harmonic number, we can use directly // gamma + psi(numberOfSamples) if 49 or more samples were provided (efficient computation) return Gamma.GAMMA + Gamma.digamma(numberOfSamples); }