List of usage examples for java.util.stream.IntStream.range
public static IntStream range(int startInclusive, int endExclusive)
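IntStream.range returns a sequential ordered IntStream from startInclusive (inclusive) to endExclusive (exclusive), stepping by 1; when startInclusive >= endExclusive the stream is empty. Before the examples below, here is a minimal, self-contained sketch of those semantics (the class name RangeDemo is ours, for illustration):

import java.util.stream.IntStream;

public class RangeDemo {
    public static void main(String[] args) {
        // Prints "0 1 2 3 4": the end bound (5) is exclusive.
        IntStream.range(0, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // An empty stream results when startInclusive >= endExclusive.
        System.out.println(IntStream.range(5, 5).count()); // prints 0

        // A common idiom: iterate a collection by index.
        String[] names = {"ada", "grace", "alan"};
        IntStream.range(0, names.length).forEach(i -> System.out.println(i + ": " + names[i]));
    }
}

For an inclusive upper bound, the companion method IntStream.rangeClosed(startInclusive, endInclusive) can be used instead.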
From source file:com.darkstar.beanCartography.utils.finder.Finder.java
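This bean-graph traversal uses IntStream.range(0, Array.getLength(array)) to index a reflected array so that each non-null, unvisited element can be pushed onto the search stack: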
/**
 * Process the bean context stack.
 *
 * @param stack   stack of objects left to search
 * @param visited set of objects already searched
 */
protected void visit(Deque<BeanContext> stack, Set<BeanContext> visited) {
    BeanContext target = stack.pop();
    if (target == null)
        return;
    if (visited.contains(target))
        return;
    visited.add(target);

    // process this object and check the filters; if it passes a filter, run the interceptors...
    filtersInterceptors.entrySet().stream()
            .filter(entry -> entry.getKey().accept(target.getSource()))
            .forEach(entry -> entry.getValue().intercept(target.getSource()));

    // process this object's contained objects (i.e. see what we need to add to the stack)...
    if (NameUtils.isImmutable(target.getSource().getClass()))
        return;

    Object fieldValue = null;
    try {
        while (target.hasNextFieldValue()) {
            fieldValue = target.nextFieldValue();

            // skip nulls...
            if (fieldValue == null)
                continue;

            // add pojo or container or whatever this is...
            if (!visited.contains(fieldValue) && !stack.contains(fieldValue))
                stack.add(new BeanContext(fieldValue));

            // arrays...
            if (fieldValue.getClass().isArray()) {
                if (!processArrays)
                    continue;
                final Object arrayFieldValue = fieldValue;
                IntStream.range(0, Array.getLength(arrayFieldValue)).forEach(i -> {
                    Object element = Array.get(arrayFieldValue, i);
                    if (element != null && !visited.contains(element) && !stack.contains(element))
                        stack.add(new BeanContext(element));
                });

            // collections...
            } else if (fieldValue instanceof Collection<?>) {
                if (!processCollections)
                    continue;
                ((Collection<?>) fieldValue).stream()
                        .filter(element -> element != null && !visited.contains(element)
                                && !stack.contains(element))
                        .forEach(element -> stack.add(new BeanContext(element)));

            // maps...
            } else if (fieldValue instanceof Map<?, ?>) {
                if (!processMaps)
                    continue;
                ((Map<?, ?>) fieldValue).entrySet().stream().forEach(entry -> {
                    if (entry.getKey() != null && !visited.contains(entry.getKey())
                            && !stack.contains(entry.getKey()))
                        stack.add(new BeanContext(entry.getKey()));
                    if (entry.getValue() != null && !visited.contains(entry.getValue())
                            && !stack.contains(entry.getValue()))
                        stack.add(new BeanContext(entry.getValue()));
                });
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:org.lightjason.agentspeak.action.builtin.TestCActionStorage.java
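Here IntStream.range(0, 100) seeds the agent storage with 100 keyed values before the clear action is exercised: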
/**
 * test clear action with keys
 */
@Test
public final void clearwithkeys() {
    Assume.assumeNotNull(m_context);

    IntStream.range(0, 100)
            .forEach(i -> m_context.agent().storage().put(MessageFormat.format("value {0}", i), i));
    Assert.assertEquals(m_context.agent().storage().size(), 100);

    new CClear("value 1", "value 5", "value 73").execute(false, m_context, Collections.emptyList(),
            Collections.emptyList());

    Assert.assertEquals(m_context.agent().storage().size(), 3);
    Assert.assertArrayEquals(m_context.agent().storage().keySet().toArray(),
            Stream.of("value 73", "value 5", "value 1").toArray());
    Assert.assertArrayEquals(m_context.agent().storage().values().toArray(),
            Stream.of(73, 5, 1).toArray());
}
From source file:msi.gama.util.GamaListFactory.java
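This factory fills a result array in parallel, using IntStream.range(0, contents.length).parallel() to evaluate the fill expression (or copy a constant) into every slot: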
public static IList create(final IScope scope, final IExpression fillExpr, final Integer size) {
    if (fillExpr == null) {
        return create(Types.NO_TYPE, size);
    }
    final Object[] contents = new Object[size];
    final IType contentType = fillExpr.getGamlType();
    // 10/01/14. Cannot use Arrays.fill() everywhere: see Issue 778.
    if (fillExpr.isConst()) {
        final Object o = fillExpr.value(scope);
        GamaExecutorService.executeThreaded(() -> IntStream.range(0, contents.length).parallel().forEach(i -> {
            contents[i] = o;
        }));
    } else {
        GamaExecutorService.executeThreaded(() -> IntStream.range(0, contents.length).parallel().forEach(i -> {
            contents[i] = fillExpr.value(scope);
        }));
    }
    return create(scope, contentType, contents);
}
From source file:com.streamsets.pipeline.stage.origin.jdbc.AbstractTableJdbcSource.java
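IntStream.range(0, numberOfThreads) appears twice in this example: once to seed each thread's queue size, and once to build the list of thread numbers that is shuffled to receive the leftover table slots: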
private Map<Integer, Integer> decideMaxTableSlotsForThreads() {
    Map<Integer, Integer> threadNumberToMaxQueueSize = new HashMap<>();
    if (tableJdbcConfigBean.batchTableStrategy == BatchTableStrategy.SWITCH_TABLES) {
        // With the switch-table strategy, divide the work equally between all threads
        // (and if it cannot be divided equally, distribute the remaining table slots
        // to a subset of threads).
        int totalNumberOfTables = allTableContexts.size();

        // First divide total tables / number of threads to get an exact balanced
        // number of table slots to be assigned to all threads.
        int balancedQueueSize = totalNumberOfTables / numberOfThreads;
        IntStream.range(0, numberOfThreads)
                .forEach(threadNumber -> threadNumberToMaxQueueSize.put(threadNumber, balancedQueueSize));

        // Remaining table slots which are not yet assigned go to a subset of threads.
        int toBeAssignedTableSlots = totalNumberOfTables % numberOfThreads;

        // Randomize threads and pick a set of threads for processing the extra slots.
        List<Integer> threadNumbers = IntStream.range(0, numberOfThreads).boxed().collect(Collectors.toList());
        Collections.shuffle(threadNumbers);
        threadNumbers = threadNumbers.subList(0, toBeAssignedTableSlots);

        // Assign the remaining table slots by incrementing the max table slot for
        // each of the randomly selected threads by 1.
        for (int threadNumber : threadNumbers) {
            threadNumberToMaxQueueSize.put(threadNumber, threadNumberToMaxQueueSize.get(threadNumber) + 1);
        }
    } else {
        // Assign one table slot to each thread if the strategy is to process all
        // available rows. Each thread will pick up one table, process it completely,
        // return it to the pool, then pick up a new table and work on it.
        IntStream.range(0, numberOfThreads)
                .forEach(threadNumber -> threadNumberToMaxQueueSize.put(threadNumber, 1));
    }
    return threadNumberToMaxQueueSize;
}
From source file:eu.amidst.core.inference.ImportanceSamplingRobust.java
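This inference step iterates the a-posteriori variables by index with IntStream.range, folding each weighted sample's sufficient statistics into the stored posterior: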
private void updatePosteriorDistributions(Assignment sample, double logWeight) {

    int nVarsAPosteriori = variablesAPosteriori.size();

    IntStream.range(0, nVarsAPosteriori).forEach(i -> {

        Variable variable = variablesAPosteriori.get(i);
        EF_UnivariateDistribution ef_univariateDistribution = variable.newUnivariateDistribution()
                .toEFUnivariateDistribution();

//        SufficientStatistics SSposterior = SSvariablesAPosteriori.get(i);
//        SufficientStatistics SSsample = ef_univariateDistribution.getSufficientStatistics(sample);

        ArrayVector SSposterior = new ArrayVector(ef_univariateDistribution.sizeOfSufficientStatistics());
        SSposterior.copy(SSvariablesAPosteriori.get(i));

        ArrayVector newSSposterior;

        if (variable.isMultinomial()) {
            ArrayVector SSsample = new ArrayVector(ef_univariateDistribution.sizeOfSufficientStatistics());
            SSsample.copy(ef_univariateDistribution.getSufficientStatistics(sample));

            if (evidence != null) {
                SSsample.multiplyBy(logWeight);
            }

//            ArrayVector SS = new ArrayVector(ef_univariateDistribution.getMomentParameters().size());
//            SS.copy(ef_univariateDistribution.getSufficientStatistics(sample));
//
//            System.out.println(Arrays.toString(SSposterior.toArray()));
//            System.out.println(Arrays.toString(SSsample.toArray()));
//            System.out.println(Arrays.toString(SS.toArray()));
//            System.out.println();

            newSSposterior = robustSumOfMultinomialSufficientStatistics(SSposterior, SSsample);
        } else {
//            if (variable.isNormal()) {
//
//                double global_shift = SSposterior.get(2);
//
//                //ArrayVector SSsample = new ArrayVector(ef_univariateDistribution.sizeOfSufficientStatistics()+1);
//                SufficientStatistics SSsample = ef_univariateDistribution.getSufficientStatistics(sample);
//
//                double coef1 = ef_univariateDistribution.getSufficientStatistics(sample).get(0);
//                double coef2 = Math.pow(coef1, 2);
//                double shift = 0;
//
//                if (coef1 <= global_shift) {
//                    shift = coef1 - 1;
//                }
//                double log_aux = Math.log(coef1 - global_shift - shift);
//
//                double[] SScoefs = new double[]{log_aux, 2 * log_aux, shift};
//
//                ArrayVector AVsample = new ArrayVector(SScoefs);
//                //AVsample.multiplyBy(logWeight);
//                AVsample.sumConstant(logWeight);
//
//                newSSposterior = robustSumOfNormalSufficientStatistics(SSposterior, AVsample);
//            } else {
            throw new UnsupportedOperationException(
                    "ImportanceSamplingRobust.updatePosteriorDistributions() works only for multinomials");
//            }
        }
        SSvariablesAPosteriori.set(i, newSSposterior);
    });
}
From source file:org.lightjason.agentspeak.action.builtin.TestCActionAgent.java
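IntStream.range(0, 100).mapToObj(...) generates 100 random literals to populate the beliefbase before the clear action is tested: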
/**
 * test clear-beliefbase
 */
@Test
public final void clearbeliefbase() {
    IntStream.range(0, 100)
            .mapToObj(i -> RandomStringUtils.random(12, "abcdefghijklmnop"))
            .map(i -> CLiteral.from(i))
            .forEach(i -> m_context.agent().beliefbase().add(i));
    Assert.assertEquals(m_context.agent().beliefbase().size(), 100);

    new CClearBeliefbase().execute(false, m_context, Collections.emptyList(), Collections.emptyList());

    Assert.assertEquals(m_context.agent().beliefbase().size(), 0);
}
From source file:com.rcn.controller.ResourceController.java
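Each index produced by IntStream.range(0, PinLength) is mapped to a random character and the characters are reduced into a StringBuilder, yielding a fixed-length license key: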
private String generateLicenseKey() {
    Random randomGenerator = new Random(System.currentTimeMillis());
    // Map each of the PinLength positions to a random character from PinAlphabet,
    // then collect the characters into a single StringBuilder.
    return IntStream.range(0, PinLength)
            .mapToObj(a -> PinAlphabet[randomGenerator.nextInt(PinAlphabet.length)])
            .reduce(new StringBuilder(), (a, b) -> a.append(b), (a, b) -> a.append(b))
            .toString();
}
From source file:delfos.rs.trustbased.WeightedGraph.java
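Two nested IntStream.range streams over the node indices materialize the graph's weighted adjacency matrix, with absent edges defaulting to 0.0: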
public Double[][] asMatrix() {
    final List<Node> nodesSorted = nodesSortingForMatrix();

    Double[][] matrix = new Double[nodesSorted.size()][nodesSorted.size()];

    IntStream.range(0, nodesSorted.size()).parallel().boxed().forEach(indexRow -> {
        Node node1 = nodesSorted.get(indexRow);
        IntStream.range(0, nodesSorted.size()).parallel().boxed().forEach(indexColumn -> {
            Node node2 = nodesSorted.get(indexColumn);
            double value = connectionWeight(node1, node2).orElse(0.0);
            matrix[indexRow][indexColumn] = value;
        });
    });
    return matrix;
}
From source file:org.gradoop.flink.model.impl.operators.matching.single.cypher.common.pojos.EmbeddingMetaDataTest.java
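IntStream.range over the list indices registers each expected variable in its own entry column before the ordering is asserted: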
@Test
public void testGetVariables() throws Exception {
    EmbeddingMetaData metaData = new EmbeddingMetaData();
    List<String> expectedVariables = Arrays.asList("a", "b", "c");
    IntStream.range(0, expectedVariables.size())
            .forEach(i -> metaData.setEntryColumn(expectedVariables.get(i), EntryType.EDGE, i));

    assertThat(metaData.getVariables(), is(expectedVariables));
}
From source file:com.vmware.photon.controller.cloudstore.xenon.entity.SchedulingConstantGeneratorTest.java
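IntStream.range(0, THREADS) spawns one worker thread per index; each thread creates hosts concurrently, and the test then checks that every generated scheduling constant is distinct: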
/**
 * Test for distinct scheduling constants, creating hosts concurrently on a
 * single Xenon host.
 */
@Test(dataProvider = "HostCounts")
public void testDistinctSchedulingConstantsConcurrent(int hostCount) throws Throwable {
    List<Long> schedulingConstants = Collections.synchronizedList(new ArrayList<>());
    TestEnvironment env = TestEnvironment.create(1);
    List<Thread> threads = new ArrayList<>();
    ServiceHost xenonHost = env.getHosts()[0];

    IntStream.range(0, THREADS).forEach((threadId) -> {
        Thread t = new Thread(() -> {
            List<Long> thisThreadSchedulingConstants = createHosts(xenonHost, hostCount);
            schedulingConstants.addAll(thisThreadSchedulingConstants);
        });
        t.start();
        threads.add(t);
    });

    for (Thread t : threads) {
        t.join();
    }

    env.stop();

    assertThat(schedulingConstants.size(), equalTo(hostCount * THREADS));

    // Check that all scheduling constants are distinct (see note in
    // testDistinctSchedulingConstantsSerial).
    Set<Long> schedulingConstantsSet = new HashSet<>();
    schedulingConstantsSet.addAll(schedulingConstants);
    assertThat(schedulingConstantsSet.size(), equalTo(schedulingConstants.size()));
}