List of usage examples for java.lang.Integer.compare
public static int compare(int x, int y)
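Before the project examples, a minimal self-contained sketch of the method's contract: Integer.compare(x, y) returns a negative value, zero, or a positive value as x is less than, equal to, or greater than y. Unlike the common "x - y" comparator idiom, it cannot overflow near the int extremes, which is why the snippets below prefer it.

import java.util.Arrays;

public class IntegerCompareDemo {
    public static void main(String[] args) {
        // Sign of the result reflects the ordering of the arguments
        System.out.println(Integer.compare(3, 7));  // negative
        System.out.println(Integer.compare(5, 5));  // 0
        System.out.println(Integer.compare(9, 2));  // positive

        // The naive "x - y" comparator overflows near the int extremes
        int x = Integer.MIN_VALUE, y = 1;
        System.out.println(x - y > 0);                  // true: wrong sign due to overflow
        System.out.println(Integer.compare(x, y) < 0);  // true: correct

        // Typical use: as a Comparator via a method reference
        Integer[] values = { 4, 1, 3 };
        Arrays.sort(values, Integer::compare);
        System.out.println(Arrays.toString(values));    // [1, 3, 4]
    }
}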
From source file:us.parr.animl.data.DataTable.java
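Demonstrates a column comparator that dispatches on the column's variable type: int-encoded columns (including string codes) go through Integer.compare, float columns through Float.compare.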
public int compare(int a, int b, VariableType colType) {
    switch (colType) {
        case CATEGORICAL_INT:
        case NUMERICAL_INT:
        case CATEGORICAL_STRING: // strings are encoded as ints
        case TARGET_CATEGORICAL_STRING:
        case TARGET_CATEGORICAL_INT:
        case UNUSED_INT:
        case UNUSED_STRING:
            return Integer.compare(a, b);
        case NUMERICAL_FLOAT:
        case UNUSED_FLOAT:
            float af = getAsFloat(a);
            float bf = getAsFloat(b);
            return Float.compare(af, bf);
        default:
            throw new IllegalArgumentException("invalid type: " + colType);
    }
}
From source file:org.apache.storm.cluster.StormClusterStateImpl.java
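Demonstrates a descending sort: swapping the arguments to Integer.compare orders the ErrorInfo list newest-first by error timestamp.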
@Override
public List<ErrorInfo> errors(String stormId, String componentId) {
    List<ErrorInfo> errorInfos = new ArrayList<>();
    String path = ClusterUtils.errorPath(stormId, componentId);
    if (stateStorage.node_exists(path, false)) {
        List<String> childrens = stateStorage.get_children(path, false);
        for (String child : childrens) {
            String childPath = path + ClusterUtils.ZK_SEPERATOR + child;
            ErrorInfo errorInfo = ClusterUtils.maybeDeserialize(
                    stateStorage.get_data(childPath, false), ErrorInfo.class);
            if (errorInfo != null)
                errorInfos.add(errorInfo);
        }
    }
    Collections.sort(errorInfos, new Comparator<ErrorInfo>() {
        public int compare(ErrorInfo arg0, ErrorInfo arg1) {
            return Integer.compare(arg1.get_error_time_secs(), arg0.get_error_time_secs());
        }
    });
    return errorInfos;
}
From source file:org.apache.giraph.master.BspServiceMaster.java
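Demonstrates sorting restored PartitionOwner objects ascending by partition id; the inline comment notes that WorkerGraphPartitioner relies on this ordering.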
/**
 * Read the finalized checkpoint file and associated metadata files for the
 * checkpoint. Modifies the {@link PartitionOwner} objects to get the
 * checkpoint prefixes. It is an optimization to prevent all workers from
 * searching all the files. Also reads in the aggregator data from the
 * finalized checkpoint file and sets it.
 *
 * @param superstep Checkpoint set to examine.
 * @throws IOException
 * @throws InterruptedException
 * @throws KeeperException
 * @return Collection of generated partition owners.
 */
private Collection<PartitionOwner> prepareCheckpointRestart(long superstep)
        throws IOException, KeeperException, InterruptedException {
    List<PartitionOwner> partitionOwners = new ArrayList<>();
    FileSystem fs = getFs();
    String finalizedCheckpointPath = getSavedCheckpointBasePath(superstep)
            + CheckpointingUtils.CHECKPOINT_FINALIZED_POSTFIX;
    LOG.info("Loading checkpoint from " + finalizedCheckpointPath);
    DataInputStream finalizedStream = fs.open(new Path(finalizedCheckpointPath));
    GlobalStats globalStats = new GlobalStats();
    globalStats.readFields(finalizedStream);
    updateCounters(globalStats);
    SuperstepClasses superstepClasses = SuperstepClasses.createToRead(getConfiguration());
    superstepClasses.readFields(finalizedStream);
    getConfiguration().updateSuperstepClasses(superstepClasses);
    int prefixFileCount = finalizedStream.readInt();
    String checkpointFile = finalizedStream.readUTF();
    for (int i = 0; i < prefixFileCount; ++i) {
        int mrTaskId = finalizedStream.readInt();
        DataInputStream metadataStream = fs.open(new Path(
                checkpointFile + "." + mrTaskId + CheckpointingUtils.CHECKPOINT_METADATA_POSTFIX));
        long partitions = metadataStream.readInt();
        WorkerInfo worker = getWorkerInfoById(mrTaskId);
        for (long p = 0; p < partitions; ++p) {
            int partitionId = metadataStream.readInt();
            PartitionOwner partitionOwner = new BasicPartitionOwner(partitionId, worker);
            partitionOwners.add(partitionOwner);
            LOG.info("prepareCheckpointRestart partitionId=" + partitionId
                    + " assigned to " + partitionOwner);
        }
        metadataStream.close();
    }
    // Ordering appears to be important as of right now we rely on this ordering
    // in WorkerGraphPartitioner
    Collections.sort(partitionOwners, new Comparator<PartitionOwner>() {
        @Override
        public int compare(PartitionOwner p1, PartitionOwner p2) {
            return Integer.compare(p1.getPartitionId(), p2.getPartitionId());
        }
    });
    globalCommHandler.getAggregatorHandler().readFields(finalizedStream);
    aggregatorTranslation.readFields(finalizedStream);
    masterCompute.readFields(finalizedStream);
    finalizedStream.close();
    return partitionOwners;
}
From source file:net.sf.maltcms.chromaui.chromatogram1Dviewer.ui.panel.Chromatogram1DHeatmapViewerPanel.java
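Demonstrates ordering chart overlays by layer position so they are re-attached to the chart panel in drawing order; pairs that are not both ChartOverlay instances compare as equal.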
public void setPlot(final XYPlot plot) {
    removeAxisListener();
    ADataset1D<?, IScan> dataset = null;
    if (plot.getDataset() instanceof ADataset1D) {
        dataset = (ADataset1D<?, IScan>) plot.getDataset();
    } else {
        throw new IllegalArgumentException("Requires a plot with ADataset1D!");
    }
    this.plot = plot;
    if (this.selectionOverlay != null) {
        this.content.remove(selectionOverlay);
        this.selectionOverlay = null;
    }
    if (selectionOverlay == null) {
        selectionOverlay = new SelectionOverlay(Color.LIGHT_GRAY, Color.RED, 2.5f, 2.5f, 0.66f);
        chartPanel.addOverlay(selectionOverlay);
        selectionOverlay.addChangeListener(chartPanel);
        this.content.add(selectionOverlay);
    } else {
        for (ISelection selection : selectionOverlay.getMouseClickSelection()) {
            selection.setDataset(dataset);
        }
        ISelection selection = selectionOverlay.getMouseHoverSelection();
        if (selection != null) {
            selection.setDataset(dataset);
        }
    }
    if (selectionHandler == null) {
        selectionHandler = new InstanceContentSelectionHandler(this.content, selectionOverlay,
                InstanceContentSelectionHandler.Mode.valueOf((String) modeSpinner.getValue()), dataset, 1);
    } else {
        selectionHandler.setDataset(dataset);
    }
    if (mouseSelectionHandler == null) {
        mouseSelectionHandler = new XYMouseSelectionHandler<>(dataset);
        mouseSelectionHandler.addSelectionChangeListener(selectionOverlay);
        mouseSelectionHandler.addSelectionChangeListener(selectionHandler);
        chartPanel.addChartMouseListener(mouseSelectionHandler);
    } else {
        mouseSelectionHandler.setDataset(dataset);
    }
    XYItemRenderer xyir = plot.getRenderer();
    if (xyir instanceof XYBlockRenderer) {
        XYBlockRenderer xybr = (XYBlockRenderer) xyir;
        boxWidthSpinner.setValue(xybr.getBlockWidth());
        boxHeightSpinner.setValue(xybr.getBlockHeight());
    }
    AxisChangeListener listener = selectionOverlay;
    ValueAxis domain = this.plot.getDomainAxis();
    ValueAxis range = this.plot.getRangeAxis();
    if (domain != null) {
        domain.addChangeListener(listener);
    }
    if (range != null) {
        range.addChangeListener(listener);
    }
    this.plot.setNoDataMessage("Loading Data...");
    chart = new JFreeChart(this.plot);
    chartPanel.setChart(chart);
    dmkl = new DomainMarkerKeyListener(this.plot);
    dmkl.setPlot(this.plot);
    chartPanel.addKeyListener(dmkl);
    addAxisListener();
    // add available chart overlays
    ArrayList<? extends Overlay> overlays = new ArrayList<>(getLookup().lookupAll(Overlay.class));
    Collections.sort(overlays, new Comparator<Overlay>() {
        @Override
        public int compare(Overlay o1, Overlay o2) {
            if (o1 instanceof ChartOverlay && o2 instanceof ChartOverlay) {
                ChartOverlay co1 = (ChartOverlay) o1;
                ChartOverlay co2 = (ChartOverlay) o2;
                return Integer.compare(co1.getLayerPosition(), co2.getLayerPosition());
            } else {
                return 0;
            }
        }
    });
    for (Overlay overlay : overlays) {
        if (!(overlay instanceof SelectionOverlay)) {
            chartPanel.removeOverlay(overlay);
            if (overlay instanceof AxisChangeListener) {
                AxisChangeListener axisChangeListener = (AxisChangeListener) overlay;
                if (domain != null) {
                    domain.addChangeListener(axisChangeListener);
                }
                if (range != null) {
                    range.addChangeListener(axisChangeListener);
                }
            }
            if (overlay instanceof ISelectionChangeListener) {
                ISelectionChangeListener isl = (ISelectionChangeListener) overlay;
                mouseSelectionHandler.addSelectionChangeListener(isl);
                mouseSelectionHandler.addSelectionChangeListener(selectionHandler);
                selectionOverlay.addChangeListener(chartPanel);
            }
            chartPanel.addOverlay(overlay);
            overlay.addChangeListener(chartPanel);
        }
    }
    // add selection overlay last
    chartPanel.removeOverlay(selectionOverlay);
    chartPanel.addOverlay(selectionOverlay);
    setViewPortAround((double) jSlider2.getValue());
    double rangeValue = chartPanel.getChart().getXYPlot().getDomainAxis().getAutoRangeMinimumSize();
    ((NumberAxis) chartPanel.getChart().getXYPlot().getDomainAxis()).setAutoRange(false);
    // chartPanel.getChart().getXYPlot().getDomainAxis().setFixedDimension(rangeValue / 10.0d);
    ((NumberAxis) chartPanel.getChart().getXYPlot().getDomainAxis()).setAutoRangeIncludesZero(false);
    ((NumberAxis) chartPanel.getChart().getXYPlot().getDomainAxis()).setRangeType(RangeType.POSITIVE);
}
From source file:com.vmware.photon.controller.apife.backends.TaskDcpBackend.java
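Demonstrates sorting a task's steps by sequence number before converting each step to its API representation.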
private Task toApiRepresentation(TaskEntity taskEntity) {
    Task task = new Task();
    task.setId(taskEntity.getId());
    task.setQueuedTime(taskEntity.getQueuedTime());
    task.setStartedTime(taskEntity.getStartedTime());
    task.setEndTime(taskEntity.getEndTime());
    task.setOperation(taskEntity.getOperation().toString());
    task.setState(taskEntity.getState().toString());
    Task.Entity entity = new Task.Entity();
    entity.setId(taskEntity.getEntityId());
    entity.setKind(taskEntity.getEntityKind());
    task.setEntity(entity);
    if (StringUtils.isNotBlank(taskEntity.getResourceProperties())) {
        try {
            Object resourceProperties = objectMapper.readValue(taskEntity.getResourceProperties(), Object.class);
            task.setResourceProperties(resourceProperties);
        } catch (IOException e) {
            logger.error("Error deserializing taskEntity resourceProperties {}",
                    taskEntity.getResourceProperties(), e);
            throw new IllegalArgumentException(
                    String.format("Error deserializing taskEntity resourceProperties %s, error %s",
                            taskEntity.getResourceProperties(), e.getMessage()));
        }
    }
    List<Step> steps = new ArrayList<>();
    Collections.sort(taskEntity.getSteps(), new Comparator<StepEntity>() {
        @Override
        public int compare(StepEntity s1, StepEntity s2) {
            return Integer.compare(s1.getSequence(), s2.getSequence());
        }
    });
    for (StepEntity stepEntity : taskEntity.getSteps()) {
        steps.add(toApiRepresentation(stepEntity));
    }
    task.setSteps(steps);
    return task;
}
From source file:com.evolveum.midpoint.model.impl.lens.LensUtil.java
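Demonstrates ordering projection contexts by the "order" field of their resource shadow discriminator.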
/**
 * Returns a list of contexts that have an equivalent discriminator with the reference context,
 * ordered by "order" in the discriminator.
 */
public static <F extends ObjectType> List<LensProjectionContext> findRelatedContexts(
        LensContext<F> context, LensProjectionContext refProjCtx) {
    List<LensProjectionContext> projCtxs = new ArrayList<LensProjectionContext>();
    ResourceShadowDiscriminator refDiscr = refProjCtx.getResourceShadowDiscriminator();
    if (refDiscr == null) {
        return projCtxs;
    }
    for (LensProjectionContext aProjCtx : context.getProjectionContexts()) {
        ResourceShadowDiscriminator aDiscr = aProjCtx.getResourceShadowDiscriminator();
        if (refDiscr.equivalent(aDiscr)) {
            projCtxs.add(aProjCtx);
        }
    }
    Comparator<? super LensProjectionContext> orderComparator = new Comparator<LensProjectionContext>() {
        @Override
        public int compare(LensProjectionContext ctx1, LensProjectionContext ctx2) {
            int order1 = ctx1.getResourceShadowDiscriminator().getOrder();
            int order2 = ctx2.getResourceShadowDiscriminator().getOrder();
            return Integer.compare(order1, order2);
        }
    };
    Collections.sort(projCtxs, orderComparator);
    return projCtxs;
}
From source file:ubic.gemma.core.loader.expression.geo.DatasetCombiner.java
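Demonstrates a comparator that counts, for each candidate, how many sample accessions map to it, and orders the candidates by that count.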
private void sortMap(final List<String> sampleAccs, final List<String> objects,
        final Map<String, String> map) {
    Collections.sort(objects, new Comparator<String>() {
        @Override
        public int compare(String arg0, String arg1) {
            int numSamples0 = 0;
            int numSamples1 = 0;
            for (String targetAcc : sampleAccs) {
                // skip samples that are not in this data set.
                if (map.get(targetAcc).equals(arg0)) {
                    numSamples0++;
                } else if (map.get(targetAcc).equals(arg1)) {
                    numSamples1++;
                }
            }
            return Integer.compare(numSamples0, numSamples1);
        }
    });
}
From source file:org.apache.carbondata.sdk.file.CarbonReaderTest.java
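Demonstrates sorting the fields of a schema read from a CarbonData file by schema ordinal before using the field names as a read projection.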
@Test
public void testReadSchemaInDataFileAndSort() throws IOException, InterruptedException {
    String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
            CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
    String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
            CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT);
    String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
            CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL);
    String rootPath = new File(this.getClass().getResource("/").getPath() + "../../").getCanonicalPath();
    String storeLocation = rootPath + "/target/";
    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation)
            .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss")
            .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT");
    String path = "./testWriteFiles";
    FileUtils.deleteDirectory(new File(path));
    Field[] fields = new Field[9];
    fields[0] = new Field("stringField", DataTypes.STRING);
    fields[1] = new Field("shortField", DataTypes.SHORT);
    fields[2] = new Field("intField", DataTypes.INT);
    fields[3] = new Field("longField", DataTypes.LONG);
    fields[4] = new Field("doubleField", DataTypes.DOUBLE);
    fields[5] = new Field("boolField", DataTypes.BOOLEAN);
    fields[6] = new Field("dateField", DataTypes.DATE);
    fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
    fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
    try {
        CarbonWriterBuilder builder = CarbonWriter.builder().outputPath(path);
        CarbonWriter writer = builder.withCsvInput(new Schema(fields)).writtenBy("CarbonReaderTest").build();
        for (int i = 0; i < 100; i++) {
            String[] row2 = new String[] { "robot" + (i % 10), String.valueOf(i), String.valueOf(i),
                    String.valueOf(Long.MAX_VALUE - i), String.valueOf((double) i / 2), String.valueOf(true),
                    "2019-03-02", "2019-02-12 03:03:34", "12.345" };
            writer.write(row2);
        }
        writer.close();
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
    File[] dataFiles2 = new File(path).listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.endsWith("carbondata");
        }
    });
    Schema schema = CarbonSchemaReader.readSchema(dataFiles2[0].getAbsolutePath());
    // sort the schema
    Arrays.sort(schema.getFields(), new Comparator<Field>() {
        @Override
        public int compare(Field o1, Field o2) {
            return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
        }
    });
    // Transform the schema
    String[] strings = new String[schema.getFields().length];
    for (int i = 0; i < schema.getFields().length; i++) {
        strings[i] = (schema.getFields())[i].getFieldName();
    }
    File folder = new File(path);
    Assert.assertTrue(folder.exists());
    CarbonReader reader = CarbonReader.builder(path, "_temp").projection(strings).build();
    int i = 0;
    while (reader.hasNext()) {
        Object[] row = (Object[]) reader.readNextRow();
        int id = (int) row[2];
        Assert.assertEquals("robot" + (id % 10), row[0]);
        Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]);
        Assert.assertEquals(Long.MAX_VALUE - id, row[3]);
        Assert.assertEquals((double) id / 2, row[4]);
        Assert.assertEquals(true, (boolean) row[5]);
        long day = 24L * 3600 * 1000;
        Assert.assertEquals("2019-03-02", new Date((day * ((int) row[6]))).toString());
        Assert.assertEquals("2019-02-12 03:03:34.0", new Timestamp((long) row[7] / 1000).toString());
        i++;
    }
    Assert.assertEquals(i, 100);
    reader.close();
    FileUtils.deleteDirectory(new File(path));
    carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, timestampFormat);
    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, badRecordAction);
    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, badRecordLoc);
}
From source file:jef.database.DbMetaData.java
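Demonstrates a comparator lambda that orders primary-key columns by their KEY_SEQ position before assembling the key definition.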
/**
 * Reads the primary key definition of the given table from database metadata.
 *
 * @param tableName the table name
 * @return the primary key, or an empty Optional if the table has none
 */
public Optional<PrimaryKey> getPrimaryKey(String tableName) throws SQLException {
    tableName = MetaHolder.toSchemaAdjustedName(tableName);
    tableName = info.profile.getObjectNameToUse(tableName);
    Connection conn = getConnection(false);
    DatabaseMetaData databaseMetaData = conn.getMetaData();
    ResultSet rs = null;
    PrimaryKey pk = null;
    List<PairIS> pkColumns = new ArrayList<PairIS>();
    try {
        rs = databaseMetaData.getPrimaryKeys(null, schema, tableName);
        while (rs.next()) {
            String pkName = rs.getString("PK_NAME");
            String col = rs.getString("COLUMN_NAME");
            int seq = rs.getShort("KEY_SEQ");
            if (pk == null) {
                pk = new PrimaryKey(pkName);
            }
            pkColumns.add(new PairIS(seq, col));
        }
        if (pk == null) {
            return Optional.empty();
        }
    } finally {
        DbUtils.close(rs);
        releaseConnection(conn);
    }
    // Columns of a composite key must appear in KEY_SEQ order.
    pkColumns.sort((a, b) -> Integer.compare(a.first, b.first));
    String[] columns = new String[pkColumns.size()];
    for (int i = 0; i < pkColumns.size(); i++) {
        columns[i] = pkColumns.get(i).second;
    }
    pk.setColumns(columns);
    return Optional.of(pk);
}
From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.ElasticsearchIndexUtils.java
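Demonstrates Integer.compare inside a stream's sorted() step, ordering column lookups by a computed sort key while building an Elasticsearch mapping.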
/**
 * Creates a mapping for the bucket - columnar elements.
 * ALSO INCLUDES THE PER-FIELD CONFIGURATION FROM THE SEARCH_INDEX_SCHEMA AND TEMPORAL_SCHEMA
 * @param bucket
 * @return
 * @throws IOException
 */
public static XContentBuilder getColumnarMapping(final DataBucketBean bucket, Optional<XContentBuilder> to_embed,
        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> field_lookups,
        final JsonNode enabled_not_analyzed, final JsonNode enabled_analyzed,
        final JsonNode default_not_analyzed, final JsonNode default_analyzed,
        final Optional<JsonNode> doc_schema,
        final SearchIndexSchemaDefaultBean search_index_schema_override,
        final ObjectMapper mapper, final String index_type) {
    try {
        final XContentBuilder start = to_embed.orElse(XContentFactory.jsonBuilder().startObject());
        final boolean columnar_enabled = Optional.ofNullable(bucket.data_schema())
                .map(DataSchemaBean::columnar_schema)
                .filter(s -> Optional.ofNullable(s.enabled()).orElse(true))
                .isPresent();
        final Map<Either<String, Tuple2<String, String>>, String> type_override = Optionals
                .of(() -> bucket.data_schema().search_index_schema().type_override())
                .map(m -> buildTypeMap(m))
                .orElse(Collections.emptyMap());
        // If no columnar settings are specified then go with a sensible default
        final Optional<DataSchemaBean.ColumnarSchemaBean> maybe_user_columnar_schema = Optionals
                .of(() -> bucket.data_schema().columnar_schema());
        final DataSchemaBean.ColumnarSchemaBean columnar_schema = maybe_user_columnar_schema
                .filter(__ -> columnar_enabled)
                .filter(schema -> (null == schema.field_include_list()) && // ie the entire thing is empty
                        (null == schema.field_exclude_list())
                        && (null == schema.field_include_pattern_list())
                        && (null == schema.field_type_include_list())
                        && (null == schema.field_exclude_pattern_list())
                        && (null == schema.field_type_exclude_list()))
                .map(schema -> BeanTemplateUtils.clone(schema)
                        .with(DataSchemaBean.ColumnarSchemaBean::field_type_include_list,
                                Arrays.asList("string", "number", "date"))
                        .done())
                .orElseGet(() -> maybe_user_columnar_schema.orElse(null)); // (NOTE: can only be null if columnar_enabled is false)

        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> column_lookups_pretypes = Stream
                .of(columnar_enabled
                        ? createFieldIncludeLookups(
                                Optionals.ofNullable(columnar_schema.field_include_list()).stream(),
                                fn -> getKey(fn), field_lookups, enabled_not_analyzed, enabled_analyzed,
                                true, search_index_schema_override, type_override, mapper, index_type)
                        : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                    columnar_enabled
                        ? createFieldExcludeLookups(
                                Optionals.ofNullable(columnar_schema.field_exclude_list()).stream(),
                                fn -> getKey(fn), field_lookups,
                                search_index_schema_override, type_override, mapper, index_type)
                        : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                    columnar_enabled
                        ? createFieldIncludeLookups(
                                Optionals.ofNullable(columnar_schema.field_include_pattern_list()).stream(),
                                fn -> Either.right(Tuples._2T(fn, "*")), field_lookups,
                                enabled_not_analyzed, enabled_analyzed, true,
                                search_index_schema_override, type_override, mapper, index_type)
                        : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                    columnar_enabled
                        ? createFieldIncludeLookups(
                                Optionals.ofNullable(columnar_schema.field_type_include_list()).stream(),
                                fn -> Either.right(Tuples._2T("*", fn)), field_lookups,
                                enabled_not_analyzed, enabled_analyzed, true,
                                search_index_schema_override, type_override, mapper, index_type)
                        : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                    columnar_enabled
                        ? createFieldExcludeLookups(
                                Optionals.ofNullable(columnar_schema.field_exclude_pattern_list()).stream(),
                                fn -> Either.right(Tuples._2T(fn, "*")), field_lookups,
                                search_index_schema_override, type_override, mapper, index_type)
                        : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                    columnar_enabled
                        ? createFieldExcludeLookups(
                                Optionals.ofNullable(columnar_schema.field_type_exclude_list()).stream(),
                                fn -> Either.right(Tuples._2T("*", fn)), field_lookups,
                                search_index_schema_override, type_override, mapper, index_type)
                        : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                    // Finally add the default columnar lookups to the unmentioned strings (ensures that *_* is at the end)
                    field_lookups.entrySet().stream()
                            .flatMap(kv -> createFieldIncludeLookups(Stream.of(kv.getKey().toString()),
                                    __ -> kv.getKey(), field_lookups, default_not_analyzed, default_analyzed,
                                    false, search_index_schema_override, type_override, mapper, index_type)))
                .flatMap(x -> x)
                .collect(Collectors.toMap(t2 -> t2._1(), t2 -> t2._2(), (v1, v2) -> v1, // (ie ignore duplicates)
                        () -> new LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>()));

        // Also any types that didn't map onto one of the fields or tokens:
        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> column_lookups_types = type_override
                .entrySet().stream()
                // (filter - convert name/* to name/type and check if I've already created such an entry using the type map)
                .filter(kv -> !column_lookups_pretypes
                        .containsKey(kv.getKey().either(s -> s, t2 -> Tuples._2T(t2._1(), kv.getValue()))))
                .flatMap(kv -> createFieldIncludeLookups(Stream.of(kv.getKey().toString()),
                        __ -> kv.getKey().<Either<String, Tuple2<String, String>>>either(s -> Either.left(s),
                                t2 -> Either.right(Tuples._2T(t2._1(), kv.getValue()))),
                        field_lookups, default_not_analyzed, default_analyzed, false,
                        search_index_schema_override, type_override, mapper, index_type))
                .collect(Collectors.toMap(t2 -> t2._1(), t2 -> t2._2(), (v1, v2) -> v1,
                        () -> new LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>()));

        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> column_lookups = Stream
                .concat(column_lookups_pretypes.entrySet().stream(), column_lookups_types.entrySet().stream())
                .sorted((a, b) -> Integer.compare(sortKey(a.getKey()), sortKey(b.getKey())))
                .collect(Collectors.toMap(t2 -> t2.getKey(), t2 -> t2.getValue(), (v1, v2) -> v1,
                        () -> new LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>()));

        final XContentBuilder properties = column_lookups.entrySet().stream()
                // properties not dynamic_templates
                .filter(kv -> kv.getKey().isLeft())
                // overwrite with version built using columns if it exists
                .map(kv -> Tuples._2T(kv.getKey(), column_lookups.getOrDefault(kv.getKey(), kv.getValue())))
                .reduce(Optional.of(start.startObject("properties"))
                        // add doc_schema if it exists
                        .map(props -> doc_schema
                                .map(ds -> Optionals.streamOf(ds.fields(), false)
                                        .reduce(props,
                                                Lambdas.wrap_u((acc, kv) -> acc.rawField(kv.getKey(),
                                                        kv.getValue().toString().getBytes())),
                                                (acc1, acc2) -> acc1 // shouldn't be possible
                                        ))
                                .orElse(props))
                        .get(),
                        Lambdas.wrap_u((acc, t2) -> acc.rawField(t2._1().left().value(),
                                t2._2().toString().getBytes())), // (left by construction)
                        (acc1, acc2) -> acc1) // (not actually possible)
                .endObject();

        final XContentBuilder templates = column_lookups.entrySet().stream()
                // properties not dynamic_templates
                .filter(kv -> kv.getKey().isRight())
                // overwrite with version built using columns if it exists
                .map(kv -> Tuples._2T(kv.getKey(), column_lookups.getOrDefault(kv.getKey(), kv.getValue())))
                .reduce(properties.startArray("dynamic_templates"),
                        Lambdas.wrap_u((acc, t2) -> acc.startObject()
                                .rawField(getFieldNameFromMatchPair(t2._1().right().value()),
                                        t2._2().toString().getBytes()) // (right by construction)
                                .endObject()),
                        (acc1, acc2) -> acc1) // (not actually possible)
                .endArray();

        return templates;
    } catch (IOException e) {
        // Handle in-practice-impossible "IOException"
        return null;
    }
}
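A closing note on style rather than on any one project above: on Java 8+, the anonymous Comparator classes in these examples can usually be replaced by Comparator.comparingInt, which compares the extracted int keys exactly as Integer.compare does. A minimal sketch; the Step class and its getter are illustrative stand-ins, not taken from any of the sources above:

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

class Step {
    private final int sequence;
    Step(int sequence) { this.sequence = sequence; }
    int getSequence() { return sequence; }
}

public class ComparingIntDemo {
    public static void main(String[] args) {
        List<Step> steps = Arrays.asList(new Step(3), new Step(1), new Step(2));
        // Equivalent to "(a, b) -> Integer.compare(a.getSequence(), b.getSequence())"
        steps.sort(Comparator.comparingInt(Step::getSequence));
        steps.forEach(s -> System.out.println(s.getSequence())); // prints 1, 2, 3
    }
}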