Usage examples for java.util.stream.IntStream.range
public static IntStream range(int startInclusive, int endExclusive)
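Returns a sequential ordered IntStream from startInclusive (inclusive) to endExclusive (exclusive), incrementing by 1. The examples below mostly use two idioms, shown here in a minimal, self-contained sketch: indexed iteration with forEach, and index-to-object mapping with mapToObj.

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class IntStreamRangeDemo {
    public static void main(String[] args) {
        // Indexed iteration: prints 0 1 2 3 4 (endExclusive is not included)
        IntStream.range(0, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // Index-to-object mapping: ["p0", "p1", "p2"]
        List<String> names = IntStream.range(0, 3)
                .mapToObj(i -> "p" + i)
                .collect(Collectors.toList());
        System.out.println(names);
    }
}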
From source file:com.uber.hoodie.common.util.TestCompactionUtils.java
private void testGetAllPendingCompactionOperations(boolean inflight, int numEntriesInPlan1,
        int numEntriesInPlan2, int numEntriesInPlan3, int numEntriesInPlan4) throws IOException {
    HoodieCompactionPlan plan1 = createCompactionPlan("000", numEntriesInPlan1);
    HoodieCompactionPlan plan2 = createCompactionPlan("001", numEntriesInPlan2);
    HoodieCompactionPlan plan3 = createCompactionPlan("002", numEntriesInPlan3);
    HoodieCompactionPlan plan4 = createCompactionPlan("003", numEntriesInPlan4);
    if (inflight) {
        scheduleInflightCompaction("000", plan1);
        scheduleInflightCompaction("001", plan2);
        scheduleInflightCompaction("002", plan3);
        scheduleInflightCompaction("003", plan4);
    } else {
        scheduleCompaction("000", plan1);
        scheduleCompaction("001", plan2);
        scheduleCompaction("002", plan3);
        scheduleCompaction("003", plan4);
    }
    List<Integer> expectedNumEntries = Arrays.asList(numEntriesInPlan1, numEntriesInPlan2,
            numEntriesInPlan3, numEntriesInPlan4);
    List<HoodieCompactionPlan> plans = new ImmutableList.Builder<HoodieCompactionPlan>()
            .add(plan1, plan2, plan3, plan4).build();
    // Check each of the four plans by index
    IntStream.range(0, 4).boxed().forEach(idx -> {
        if (expectedNumEntries.get(idx) > 0) {
            Assert.assertEquals("check if plan " + idx + " has exp entries",
                    expectedNumEntries.get(idx).longValue(), plans.get(idx).getOperations().size());
        } else {
            Assert.assertNull("Plan " + idx + " has null ops", plans.get(idx).getOperations());
        }
    });
    metaClient = new HoodieTableMetaClient(metaClient.getHadoopConf(), basePath, true);
    Map<String, Pair<String, HoodieCompactionOperation>> pendingCompactionMap =
            CompactionUtils.getAllPendingCompactionOperations(metaClient);
    Map<String, Pair<String, HoodieCompactionOperation>> expPendingCompactionMap =
            generateExpectedCompactionOperations(Arrays.asList(plan1, plan2, plan3, plan4));
    // Ensure all the pending compaction operations match the expected map
    Assert.assertEquals(expPendingCompactionMap, pendingCompactionMap);
}
From source file:org.gradoop.flink.model.impl.operators.matching.single.cypher.common.pojos.EmbeddingMetaDataTest.java
@Test
public void testGetEdgeVariables() throws Exception {
    EmbeddingMetaData metaData = new EmbeddingMetaData();
    List<String> inputVariables = Arrays.asList("a", "b", "c", "d");
    IntStream.range(0, inputVariables.size())
            .forEach(i -> metaData.setEntryColumn(inputVariables.get(i),
                    i % 2 == 0 ? EntryType.VERTEX : EntryType.EDGE, i));
    List<String> expectedVariables = inputVariables.stream()
            .filter(var -> inputVariables.indexOf(var) % 2 == 1)
            .collect(Collectors.toList());
    assertThat(metaData.getEdgeVariables(), is(expectedVariables));
}
From source file:com.asakusafw.runtime.io.text.directio.AbstractTextStreamFormatTest.java
private MockFormat format(int columns, HeaderType headerType) {
    return format(headerType,
            IntStream.range(0, columns).mapToObj(i -> "p" + i).toArray(String[]::new));
}
From source file:fi.csc.kapaVirtaAS.MessageTransformer.java
private Stream<Node> namedNodeMapToStream(NamedNodeMap namedNodeMap) {
    return IntStream.range(0, namedNodeMap.getLength()).mapToObj(namedNodeMap::item);
}
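NamedNodeMap has no iterator, only getLength() and item(int), so IntStream.range is the natural bridge into the Stream API. A hypothetical call site for the helper above (element is an org.w3c.dom.Element assumed to be in scope):

// Hypothetical usage: stream an element's attributes and print each
// name=value pair (Node and NamedNodeMap come from org.w3c.dom).
namedNodeMapToStream(element.getAttributes())
        .forEach(attr -> System.out.println(attr.getNodeName() + "=" + attr.getNodeValue()));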
From source file:it.greenvulcano.gvesb.virtual.gv_multipart.MultipartCallOperation.java
/**
 * Adds a form part to the multipart call.
 *
 * @param nodeList    the configuration nodes holding the field definitions
 * @param name        the name of the form part
 * @param contentType the content type of the part
 */
private void createFormPart(NodeList nodeList, String name, String contentType) {
    this.contentType = getContentType(contentType);
    StringBody stringBody = new StringBody(name, this.contentType);
    formBodyBuilder = FormBodyPartBuilder.create(name, stringBody);
    IntStream.range(0, nodeList.getLength()).mapToObj(nodeList::item).forEach(node -> {
        try {
            if (!("Content-Type".equals(XMLConfig.get(node, "@name")))) {
                formBodyBuilder.addField(XMLConfig.get(node, "@name"), XMLConfig.get(node, "@value"));
            }
        } catch (XMLConfigException e) {
            e.printStackTrace();
        }
    });
    formBodyPart = formBodyBuilder.setName(name).build();
    multipartEntityBuilder.addPart(formBodyPart);
}
From source file:org.ligoj.app.plugin.prov.aws.ProvAwsTerraformServiceTest.java
/**
 * Generates a quote instance for test purposes.
 *
 * @param name            the instance name
 * @param os              the VM operating system
 * @param maxVariableCost the maximal variable cost
 * @param min             the minimal quantity
 * @param max             the maximal quantity
 * @param storages        the sizes of the attached storages
 * @return the quote instance
 */
private ProvQuoteInstance newQuoteInstance(final String name, final VmOs os, final Double maxVariableCost,
        final int min, final Integer max, int... storages) {
    final ProvQuoteInstance quoteInstance = new ProvQuoteInstance();
    final ProvInstanceType instance = new ProvInstanceType();
    instance.setName("t2.micro");
    final ProvInstancePrice instancePrice = new ProvInstancePrice();
    instancePrice.setType(instance);
    instancePrice.setOs(os);
    final ProvInstancePriceTerm instancePriceType = new ProvInstancePriceTerm();
    instancePriceType.setName("some");
    instancePrice.setTerm(instancePriceType);
    quoteInstance.setPrice(instancePrice);
    quoteInstance.setId(1);
    quoteInstance.setName(name);
    quoteInstance.setMaxVariableCost(maxVariableCost);
    quoteInstance.setMinQuantity(min);
    quoteInstance.setMaxQuantity(max);
    quoteInstance.setOs(os);
    quoteInstance.setStorages(new ArrayList<>());
    // One quote storage per requested size, named "<name>-storage-<index>"
    IntStream.range(0, storages.length).forEach(idx -> {
        final ProvQuoteStorage storage = new ProvQuoteStorage();
        storage.setQuoteInstance(quoteInstance);
        ProvStoragePrice price = new ProvStoragePrice();
        ProvStorageType type = new ProvStorageType();
        type.setName("gp2");
        price.setType(type);
        storage.setPrice(price);
        storage.setName(name + "-storage-" + idx);
        storage.setSize(storages[idx]);
        quoteInstance.getStorages().add(storage);
    });
    return quoteInstance;
}
From source file:org.apache.nifi.toolkit.tls.standalone.TlsToolkitStandaloneCommandLine.java
private List<String> getPasswords(String arg, CommandLine commandLine, int num, String numArg)
        throws CommandLineParseException {
    String[] optionValues = commandLine.getOptionValues(arg);
    if (optionValues == null) {
        // No passwords supplied: generate one per requested item
        return IntStream.range(0, num).mapToObj(operand -> passwordUtil.generatePassword())
                .collect(Collectors.toList());
    }
    if (optionValues.length == 1) {
        // A single password supplied: reuse it num times
        return IntStream.range(0, num).mapToObj(value -> optionValues[0]).collect(Collectors.toList());
    } else if (optionValues.length == num) {
        return Arrays.stream(optionValues).collect(Collectors.toList());
    }
    return printUsageAndThrow(
            "Expected either 1 value or " + num + " (the number of " + numArg + ") values for " + arg,
            ExitCode.ERROR_INCORRECT_NUMBER_OF_PASSWORDS);
}
From source file:fi.csc.kapaVirtaAS.MessageTransformer.java
private Stream<Node> nodeListToStream(NodeList nodelist) {
    return IntStream.range(0, nodelist.getLength()).mapToObj(nodelist::item)
            .filter(item -> item.getNodeType() == Node.ELEMENT_NODE);
}
From source file:org.gradoop.flink.model.impl.operators.matching.single.cypher.common.pojos.EmbeddingMetaDataTest.java
@Test
public void testGetPathVariables() throws Exception {
    EmbeddingMetaData metaData = new EmbeddingMetaData();
    List<String> inputVariables = Arrays.asList("a", "b", "c", "d");
    IntStream.range(0, inputVariables.size())
            .forEach(i -> metaData.setEntryColumn(inputVariables.get(i),
                    i % 2 == 0 ? EntryType.VERTEX : EntryType.PATH, i));
    List<String> expectedVariables = inputVariables.stream()
            .filter(var -> inputVariables.indexOf(var) % 2 == 1)
            .collect(Collectors.toList());
    assertThat(metaData.getPathVariables(), is(expectedVariables));
}
From source file:org.apache.nifi.processors.standard.GenerateTableFetch.java
@Override
public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory)
        throws ProcessException {
    ProcessSession session = sessionFactory.createSession();
    FlowFile fileToProcess = null;
    if (context.hasIncomingConnection()) {
        fileToProcess = session.get();
        if (fileToProcess == null) {
            // Incoming connection with no flow file available, do no work (see capability description)
            return;
        }
    }

    final ComponentLog logger = getLogger();

    final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
    final DatabaseAdapter dbAdapter = dbAdapters.get(context.getProperty(DB_TYPE).getValue());
    final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(fileToProcess).getValue();
    final String columnNames = context.getProperty(COLUMN_NAMES).evaluateAttributeExpressions(fileToProcess).getValue();
    final String maxValueColumnNames = context.getProperty(MAX_VALUE_COLUMN_NAMES)
            .evaluateAttributeExpressions(fileToProcess).getValue();
    final int partitionSize = context.getProperty(PARTITION_SIZE)
            .evaluateAttributeExpressions(fileToProcess).asInteger();

    final StateManager stateManager = context.getStateManager();
    final StateMap stateMap;

    try {
        stateMap = stateManager.getState(Scope.CLUSTER);
    } catch (final IOException ioe) {
        logger.error("Failed to retrieve observed maximum values from the State Manager. Will not perform "
                + "query until this is accomplished.", ioe);
        context.yield();
        return;
    }
    try {
        // Make a mutable copy of the current state property map. This will be updated by the result row
        // callback, and eventually set as the current state map (after the session has been committed)
        final Map<String, String> statePropertyMap = new HashMap<>(stateMap.toMap());

        // Build a WHERE clause with maximum-value columns (if they exist), and a list of column names that
        // will contain MAX(<column>) aliases. The executed SQL query will retrieve the count of all records
        // after the filter(s) have been applied, as well as the new maximum values for the specified columns.
        // This allows the processor to generate the correctly partitioned SQL statements as well as to update
        // the state with the latest observed maximum values.
        String whereClause = null;
        List<String> maxValueColumnNameList = StringUtils.isEmpty(maxValueColumnNames)
                ? new ArrayList<>(0)
                : Arrays.asList(maxValueColumnNames.split("\\s*,\\s*"));
        List<String> maxValueClauses = new ArrayList<>(maxValueColumnNameList.size());

        String columnsClause = null;
        List<String> maxValueSelectColumns = new ArrayList<>(maxValueColumnNameList.size() + 1);
        maxValueSelectColumns.add("COUNT(*)");

        // For each maximum-value column, get a WHERE filter and a MAX(column) alias
        IntStream.range(0, maxValueColumnNameList.size()).forEach((index) -> {
            String colName = maxValueColumnNameList.get(index);
            maxValueSelectColumns.add("MAX(" + colName + ") " + colName);
            final String fullyQualifiedStateKey = getStateKey(tableName, colName);
            String maxValue = statePropertyMap.get(fullyQualifiedStateKey);
            if (StringUtils.isEmpty(maxValue) && !isDynamicTableName) {
                // If the table name is static and the fully-qualified key was not found, try just the column name
                maxValue = statePropertyMap.get(getStateKey(null, colName));
            }
            if (!StringUtils.isEmpty(maxValue)) {
                Integer type = columnTypeMap.get(fullyQualifiedStateKey);
                if (type == null && !isDynamicTableName) {
                    // If the table name is static and the fully-qualified key was not found, try just the column name
                    type = columnTypeMap.get(getStateKey(null, colName));
                }
                if (type == null) {
                    // This shouldn't happen as we are populating columnTypeMap when the processor is scheduled
                    // or when the first maximum is observed
                    throw new IllegalArgumentException("No column type found for: " + colName);
                }
                // Add a condition for the WHERE clause
                maxValueClauses.add(colName + (index == 0 ? " > " : " >= ")
                        + getLiteralByType(type, maxValue, dbAdapter.getName()));
            }
        });

        whereClause = StringUtils.join(maxValueClauses, " AND ");
        columnsClause = StringUtils.join(maxValueSelectColumns, ", ");

        // Build a SELECT query with maximum-value columns (if present)
        final String selectQuery = dbAdapter.getSelectStatement(tableName, columnsClause, whereClause,
                null, null, null);
        long rowCount = 0;

        try (final Connection con = dbcpService.getConnection(); final Statement st = con.createStatement()) {
            final Integer queryTimeout = context.getProperty(QUERY_TIMEOUT)
                    .evaluateAttributeExpressions(fileToProcess).asTimePeriod(TimeUnit.SECONDS).intValue();
            st.setQueryTimeout(queryTimeout); // timeout in seconds

            logger.debug("Executing {}", new Object[] { selectQuery });
            ResultSet resultSet;
            resultSet = st.executeQuery(selectQuery);

            if (resultSet.next()) {
                // Total row count is in the first column
                rowCount = resultSet.getLong(1);

                // Update the state map with the newly-observed maximum values
                ResultSetMetaData rsmd = resultSet.getMetaData();
                for (int i = 2; i <= rsmd.getColumnCount(); i++) {
                    // Some JDBC drivers consider the columns name and label to be very different things.
                    // Since this column has been aliased lets check the label first,
                    // if there is no label we'll use the column name.
                    String resultColumnName = (StringUtils.isNotEmpty(rsmd.getColumnLabel(i))
                            ? rsmd.getColumnLabel(i) : rsmd.getColumnName(i)).toLowerCase();
                    String fullyQualifiedStateKey = getStateKey(tableName, resultColumnName);
                    String resultColumnCurrentMax = statePropertyMap.get(fullyQualifiedStateKey);
                    if (StringUtils.isEmpty(resultColumnCurrentMax) && !isDynamicTableName) {
                        // If we can't find the value at the fully-qualified key name and the table name is
                        // static, it is possible (under a previous scheme) the value has been stored under a
                        // key that is only the column name. Fall back to check the column name; either way,
                        // when a new maximum value is observed, it will be stored under the fully-qualified
                        // key from then on.
                        resultColumnCurrentMax = statePropertyMap.get(resultColumnName);
                    }

                    int type = rsmd.getColumnType(i);
                    if (isDynamicTableName) {
                        // We haven't pre-populated the column type map if the table name is dynamic, so do it here
                        columnTypeMap.put(fullyQualifiedStateKey, type);
                    }
                    try {
                        String newMaxValue = getMaxValueFromRow(resultSet, i, type, resultColumnCurrentMax,
                                dbAdapter.getName());
                        if (newMaxValue != null) {
                            statePropertyMap.put(fullyQualifiedStateKey, newMaxValue);
                        }
                    } catch (ParseException | IOException pie) {
                        // Fail the whole thing here before we start creating flow files and such
                        throw new ProcessException(pie);
                    }
                }
            } else {
                // Something is very wrong here, one row (even if count is zero) should be returned
                throw new SQLException("No rows returned from metadata query: " + selectQuery);
            }

            final long numberOfFetches = (partitionSize == 0) ? rowCount
                    : (rowCount / partitionSize) + (rowCount % partitionSize == 0 ? 0 : 1);

            // Generate SQL statements to read "pages" of data
            for (long i = 0; i < numberOfFetches; i++) {
                // Boxed Longs so that "no partitioning" can be passed to the adapter as null limit/offset
                Long limit = partitionSize == 0 ? null : (long) partitionSize;
                Long offset = partitionSize == 0 ? null : i * partitionSize;
                final String query = dbAdapter.getSelectStatement(tableName, columnNames, whereClause,
                        StringUtils.join(maxValueColumnNameList, ", "), limit, offset);
                FlowFile sqlFlowFile = (fileToProcess == null) ? session.create()
                        : session.create(fileToProcess);
                sqlFlowFile = session.write(sqlFlowFile, out -> out.write(query.getBytes()));
                session.transfer(sqlFlowFile, REL_SUCCESS);
            }

            if (fileToProcess != null) {
                session.remove(fileToProcess);
            }
        } catch (SQLException e) {
            if (fileToProcess != null) {
                logger.error("Unable to execute SQL select query {} due to {}, routing {} to failure",
                        new Object[] { selectQuery, e, fileToProcess });
                fileToProcess = session.putAttribute(fileToProcess, "generatetablefetch.sql.error",
                        e.getMessage());
                session.transfer(fileToProcess, REL_FAILURE);
            } else {
                logger.error("Unable to execute SQL select query {} due to {}",
                        new Object[] { selectQuery, e });
                throw new ProcessException(e);
            }
        }

        session.commit();
        try {
            // Update the state
            stateManager.setState(statePropertyMap, Scope.CLUSTER);
        } catch (IOException ioe) {
            logger.error("{} failed to update State Manager, observed maximum values will not be recorded. "
                    + "Also, any generated SQL statements may be duplicated.", new Object[] { this, ioe });
        }
    } catch (final ProcessException pe) {
        // Log the cause of the ProcessException if it is available
        Throwable t = (pe.getCause() == null ? pe : pe.getCause());
        logger.error("Error during processing: {}", new Object[] { t.getMessage() }, t);
        session.rollback();
        context.yield();
    }
}
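The paging arithmetic in the loop above is plain ceiling division. A minimal, standalone sketch with illustrative values (the class and variable names are hypothetical, not NiFi's):

// Sketch of the paging math: for rowCount rows and a non-zero partition
// size p, ceil(rowCount / p) fetches are emitted, each covering
// [offset, offset + limit).
public class PagingDemo {
    public static void main(String[] args) {
        final long rowCount = 10;
        final int partitionSize = 3;
        // Ceiling division: add one extra page when the last one is partial
        final long numberOfFetches = (rowCount / partitionSize)
                + (rowCount % partitionSize == 0 ? 0 : 1);
        for (long i = 0; i < numberOfFetches; i++) {
            // Prints LIMIT 3 with OFFSET 0, 3, 6, 9: four pages for ten rows
            System.out.println("LIMIT " + partitionSize + " OFFSET " + (i * partitionSize));
        }
    }
}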