Example usage for java.util LinkedHashMap put

Introduction

On this page you can find example usage for java.util.LinkedHashMap#put.

Prototype

V put(K key, V value);

Document

Associates the specified value with the specified key in this map (optional operation).
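
Before the project examples below, here is a minimal, self-contained sketch of put in isolation (the class name LinkedHashMapPutDemo and the header keys are illustrative only, not taken from any project listed here). It shows that put returns the previous value mapped to the key, or null if there was none, and that a LinkedHashMap iterates in insertion order.

import java.util.LinkedHashMap;
import java.util.Map;

public class LinkedHashMapPutDemo {
    public static void main(String[] args) {
        // Insertion order is preserved on iteration, unlike HashMap.
        Map<String, String> headers = new LinkedHashMap<>();
        headers.put("Content-Type", "application/json"); // no previous mapping, returns null
        headers.put("Accept", "text/plain");

        // Re-putting an existing key replaces the value and returns the old one;
        // the key keeps its original position in the iteration order.
        String previous = headers.put("Content-Type", "text/xml");
        System.out.println(previous); // application/json
        System.out.println(headers);  // {Content-Type=text/xml, Accept=text/plain}
    }
}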

Usage

From source file:MondrianConnector.java

public ArrayList<LinkedHashMap<String, String>> ExecuteQuery(Query queryObject) throws Exception {
    System.setProperty("mondrian.olap.SsasCompatibleNaming", "true");
    String connectionString = getConnectionString(queryObject);
    RolapConnection connection = (RolapConnection) DriverManager.getConnection(connectionString, null);
    mondrian.olap.Query query = connection.parseQuery(queryObject.getMdxQuery());
    Result result = connection.execute(query);
    ArrayList<LinkedHashMap<String, String>> data = new ArrayList<LinkedHashMap<String, String>>();
    DecimalFormat df = new DecimalFormat("0", DecimalFormatSymbols.getInstance(Locale.ENGLISH));
    df.setMaximumFractionDigits(340); //340 = DecimalFormat.DOUBLE_FRACTION_DIGITS
    if (result.getAxes().length == 1) {
        // Only one axis was returned, so each position on it is a measure.
        ArrayList<String> measures = new ArrayList<String>();
        for (Position p : result.getAxes()[0].getPositions()) {
            measures.add(p.get(0).getUniqueName().toString());
        }
        LinkedHashMap<String, String> row = new LinkedHashMap<String, String>();
        for (int i = 0; i < measures.size(); i++) {

            Object value = result.getCell(new int[] { i }).getValue();
            if (value == null) {
                row.put(measures.get(i), null);
            } else if (value instanceof Integer) {
                row.put(measures.get(i), ((Integer) value).toString());
            } else if (value instanceof Double) {
                row.put(measures.get(i), df.format(value));
            } else {
                row.put(measures.get(i), value.toString());
            }
        }
        data.add(row);
    } else if (result.getAxes().length == 2) {
        ArrayList<String> measures = new ArrayList<String>();
        for (Position p : result.getAxes()[0].getPositions()) {
            measures.add(p.get(0).getUniqueName().toString());
        }
        ArrayList<ArrayList<DimensionItem>> dimensionItems = new ArrayList<ArrayList<DimensionItem>>();
        for (Position p : result.getAxes()[1].getPositions()) {
            ArrayList<DimensionItem> itemsAtRow = new ArrayList<DimensionItem>();
            for (Object item : p.toArray()) {
                RolapMemberBase member = (RolapMemberBase) item;
                itemsAtRow.add(new DimensionItem(member.getLevel().getHierarchy().toString(),
                        member.getCaption().toString()));
            }
            dimensionItems.add(itemsAtRow);
        }
        for (int ix = 0; ix < dimensionItems.size(); ix++) {
            LinkedHashMap<String, String> row = new LinkedHashMap<String, String>();
            for (DimensionItem item : dimensionItems.get(ix)) {
                row.put(item.getLevel(), item.getCaption());
            }
            for (int i = 0; i < measures.size(); i++) {
                Object value = result.getCell(new int[] { i, ix }).getValue();
                if (value == null) {
                    row.put(measures.get(i), "0");
                } else {
                    if (value instanceof Integer) {
                        row.put(measures.get(i), ((Integer) value).toString());
                    } else if (value instanceof Double) {
                        row.put(measures.get(i), df.format(value));
                    } else {
                        row.put(measures.get(i), value.toString());
                    }
                }
            }
            data.add(row);
        }
    }
    return data;
}

From source file:gov.nih.nci.cabig.caaers.web.ae.AeTab.java

public CompositeField createTimeField(String baseProperty, String displayName, String labelProperty) {
    InputField hrField = InputFieldFactory.createTextField("hourString", "", FieldValidator.NUMBER_VALIDATOR);
    InputField mmField = InputFieldFactory.createTextField("minuteString", " ",
            FieldValidator.NUMBER_VALIDATOR);
    LinkedHashMap<Object, Object> amPmOption = new LinkedHashMap<Object, Object>();
    amPmOption.put("0", "AM");
    amPmOption.put("1", "PM");
    InputField amPmField = InputFieldFactory.createSelectField("type", "", false, amPmOption);
    InputFieldAttributes.setSize(hrField, 2);
    InputFieldAttributes.setSize(mmField, 2);
    return new CompositeField(baseProperty, new DefaultInputFieldGroup(null, displayName).addField(hrField)
            .addField(mmField).addField(amPmField), labelProperty);
}

From source file:com.linkedin.pinot.controller.helix.core.rebalance.DefaultRebalanceSegmentStrategy.java

/**
 * Rebalances serving segments based on the auto-rebalance strategy
 * @param idealState
 * @param tableConfig
 * @param targetNumReplicas
 * @return
 */
protected IdealState rebalanceServingSegments(IdealState idealState, TableConfig tableConfig,
        int targetNumReplicas) {

    LOGGER.info("Rebalancing serving segments for table {}", tableConfig.getTableName());

    int numReplicasInIdealState = Integer.parseInt(idealState.getReplicas());

    if (numReplicasInIdealState > targetNumReplicas) {
        // We need to reduce the number of replicas per helix partition.

        for (String segmentName : idealState.getPartitionSet()) {
            Map<String, String> instanceStateMap = idealState.getInstanceStateMap(segmentName);
            if (instanceStateMap.size() > targetNumReplicas) {
                Set<String> keys = instanceStateMap.keySet();
                while (instanceStateMap.size() > targetNumReplicas) {
                    instanceStateMap.remove(keys.iterator().next());
                }
            } else if (instanceStateMap.size() < targetNumReplicas) {
                LOGGER.warn("Table {}, segment {} has {} replicas, less than {} (requested number of replicas)",
                        idealState.getResourceName(), segmentName, instanceStateMap.size(), targetNumReplicas);
            }
        }
    } else {
        // Number of replicas is either the same or higher, so invoke Helix rebalancer.

        final Map<String, Map<String, String>> mapFields = idealState.getRecord().getMapFields();

        Map<String, Map<String, String>> removedEntries = new LinkedHashMap<>();
        if (tableConfig.getTableType().equals(CommonConstants.Helix.TableType.REALTIME)) {
            filterSegmentsForRealtimeRebalance(mapFields, removedEntries);
        }

        if (!mapFields.isEmpty()) {

            String tableNameWithType = tableConfig.getTableName();

            LinkedHashMap<String, Integer> states = new LinkedHashMap<>();
            List<String> segments = Lists.newArrayList(idealState.getPartitionSet());
            states.put(RealtimeSegmentOnlineOfflineStateModel.OFFLINE, 0);
            states.put(RealtimeSegmentOnlineOfflineStateModel.ONLINE, targetNumReplicas);
            Set<String> currentHosts = new HashSet<>();
            for (String segment : mapFields.keySet()) {
                currentHosts.addAll(mapFields.get(segment).keySet());
            }
            List<String> servingInstances = new ArrayList<>();
            List<String> enabledServingInstances = new ArrayList<>();
            getServingInstances(tableConfig, servingInstances, enabledServingInstances);

            AutoRebalanceStrategy rebalanceStrategy = new AutoRebalanceStrategy(tableNameWithType, segments,
                    states);

            LOGGER.info("Current nodes for table {}: {}", tableNameWithType, currentHosts);
            LOGGER.info("New nodes for table {}: {}", tableNameWithType, servingInstances);
            LOGGER.info("Enabled nodes for table: {} {}", tableNameWithType, enabledServingInstances);
            ZNRecord newZnRecord = rebalanceStrategy.computePartitionAssignment(servingInstances,
                    enabledServingInstances, mapFields, new ClusterDataCache());
            final Map<String, Map<String, String>> newMapping = newZnRecord.getMapFields();
            for (Map.Entry<String, Map<String, String>> entry : newMapping.entrySet()) {
                idealState.setInstanceStateMap(entry.getKey(), entry.getValue());
            }
        }

        // If we removed any entries, add them back here
        for (Map.Entry<String, Map<String, String>> entry : removedEntries.entrySet()) {
            idealState.setInstanceStateMap(entry.getKey(), entry.getValue());
        }
    }
    return idealState;
}

From source file:com.sillelien.dollar.api.types.DollarMap.java

@NotNull
@Override
public var $prepend(@NotNull var value) {
    final LinkedHashMap<var, var> newMap = new LinkedHashMap<>();
    newMap.put(value.$pairKey(), value.$pairValue());
    newMap.putAll(toVarMap().mutable());
    return DollarFactory.fromValue(newMap, errors(), value.errors());
}

From source file:com.hp.hpl.inkml.Ink.java

/**
 * Returns the InkML markup string of this Ink data object
 *
 * @return String markup string
 */
public String toInkML() {
    final StringBuffer xml = new StringBuffer();
    final LinkedHashMap<String, String> attrMap = new LinkedHashMap<String, String>();
    if (StringUtils.isNotEmpty(this.docID)) {
        attrMap.put("documentID", this.docID);
    }
    xml.append("<ink xmlns=\"" + Ink.INKML_NAMESPACE + "\">");
    // write definitions
    xml.append(this.definitions.toInkML());

    // write Trace data list
    final Iterator<TraceDataElement> iterator = this.traceDataList.iterator();
    while (iterator.hasNext()) {
        final TraceDataElement data = iterator.next();
        xml.append(data.toInkML());
    }
    xml.append("</ink>");
    return xml.toString();
}

From source file:com.nextdoor.bender.ipc.s3.S3TransporterTest.java

@Test
public void testGzFilename() throws TransportException, IllegalStateException, IOException {
    /*
     * Create mock client, requests, and replies
     */
    AmazonS3Client mockClient = getMockClient();

    /*
     * Fill buffer with mock data
     */
    S3TransportBuffer buffer = new S3TransportBuffer(1000, true, new S3TransportSerializer());
    InternalEvent mockIevent = mock(InternalEvent.class);
    doReturn("foo").when(mockIevent).getSerialized();

    /*
     * Create transport
     */
    Map<String, MultiPartUpload> multiPartUploads = new HashMap<String, MultiPartUpload>(0);
    S3Transport transport = new S3Transport(mockClient, "bucket", "basepath/", true, multiPartUploads);

    /*
     * Do actual test
     */
    buffer.add(mockIevent);
    LinkedHashMap<String, String> partitions = new LinkedHashMap<String, String>();
    partitions.put(S3Transport.FILENAME_KEY, "a_filename.gz");
    ArgumentCaptor<UploadPartRequest> argument = ArgumentCaptor.forClass(UploadPartRequest.class);
    transport.sendBatch(buffer, partitions, new TestContext());
    verify(mockClient).uploadPart(argument.capture());

    /*
     * Check results
     */
    assertEquals("basepath/a_filename.bz2", argument.getValue().getKey());
}

From source file:org.jutge.joc.porra.controller.base.BetController.java

@EntityStashManaged(entities = EntityStashEntityModule.ALL, views = EntityStashViewModule.NONE)
@SuppressWarnings({ "rawtypes", "unchecked" })
@RequestMapping(value = "/usuari/aposta-guanyador/{jugador}", method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
public @ResponseBody LinkedHashMap deleteWinnerBet(@PathVariable final String jugador,
        final EntityStash entityStash, final HttpServletRequest request, final Locale locale) {
    this.logger.info("BetController.deleteWinnerBet");
    final LinkedHashMap response = new LinkedHashMap();
    try {
        this.betService.deleteWinnerBet(jugador.replace(" ", "."), entityStash);
        this.createResponse(response, entityStash, locale);
    } catch (final BetException exception) {
        response.put("errors", exception.getErrorMessages());
    }
    return response;
}

From source file:com.exalttech.trex.ui.views.importPcap.ImportedPacketTableView.java

/**
 * Builds VM instructions for the source/destination IPv4 addresses
 *
 * @param packetData
 * @return
 */
public Map<String, Object> getVm(ImportPcapTableData packetData) {
    VMInstructionBuilder vmInstructionBuilder = new VMInstructionBuilder(packetData.hasVlan(),
            packetData.getPacketType().indexOf("UDP") != -1);
    ArrayList<Object> instructionsList = new ArrayList<>();

    if (propertiesBinder.isDestinationEnabled()) {
        instructionsList.addAll(vmInstructionBuilder.addVmInstruction(
                getInstructionType(packetData, propertiesBinder.getDstAddress()), propertiesBinder.getDstMode(),
                propertiesBinder.getDstCount(), "1", propertiesBinder.getDstAddress()));
    }
    if (propertiesBinder.isSourceEnabled()) {
        instructionsList.addAll(vmInstructionBuilder.addVmInstruction(
                getInstructionType(packetData, propertiesBinder.getSrcAddress()), propertiesBinder.getSrcMode(),
                propertiesBinder.getSrcCount(), "1", propertiesBinder.getSrcAddress()));
    }

    // add ipv4 checksum instructions
    instructionsList.addAll(vmInstructionBuilder.addChecksumInstruction());

    Map<String, Object> additionalProperties = new HashMap<>();

    LinkedHashMap<String, Object> vmBody = new LinkedHashMap<>();
    vmBody.put("split_by_var", vmInstructionBuilder.getSplitByVar());
    vmBody.put("instructions", instructionsList);

    // add cache size
    vmInstructionBuilder.addCacheSize(vmBody);

    additionalProperties.put("vm", vmBody);

    return additionalProperties;
}

From source file:com.nextdoor.bender.ipc.s3.S3TransporterTest.java

@Test
public void testUnpartitioned() throws TransportException, IllegalStateException, IOException {
    /*
     * Create mock client, requests, and replies
     */
    AmazonS3Client mockClient = getMockClient();

    /*
     * Fill buffer with mock data
     */
    S3TransportBuffer buffer = new S3TransportBuffer(1000, false, new S3TransportSerializer());
    InternalEvent mockIevent = mock(InternalEvent.class);
    doReturn("foo").when(mockIevent).getSerialized();

    /*
     * Create transport
     */
    Map<String, MultiPartUpload> multiPartUploads = new HashMap<String, MultiPartUpload>(0);
    S3Transport transport = new S3Transport(mockClient, "bucket", "basepath", false, multiPartUploads);

    /*
     * Do actual test
     */
    buffer.add(mockIevent);
    LinkedHashMap<String, String> partitions = new LinkedHashMap<String, String>();
    partitions.put(S3Transport.FILENAME_KEY, "a_filename");

    ArgumentCaptor<UploadPartRequest> argument = ArgumentCaptor.forClass(UploadPartRequest.class);
    transport.sendBatch(buffer, partitions, new TestContext());
    verify(mockClient).uploadPart(argument.capture());

    /*
     * Check results
     */
    assertEquals("bucket", argument.getValue().getBucketName());
    assertEquals("basepath/a_filename", argument.getValue().getKey());
    assertEquals(1, argument.getValue().getPartNumber());
    assertEquals(4, argument.getValue().getPartSize()); // foo\n
    assertEquals("123", argument.getValue().getUploadId());
}

From source file:com.hp.hpl.inkml.Ink.java

/**
 * Method used by the Archiver component (InkMLWriter) to save the markup data of the Ink data object to file or other data stream
 *
 * @param writer
 */

public void writeXML(final InkMLWriter writer) throws IOException, InkMLException {
    if (writer == null) {
        Ink.logger.severe("Ink:writeXML, InkMLWriter object not available (null)!!!");
        throw new InkMLException("Ink:writeXML, InkMLWriter object not available (null)!!!");
    }
    final LinkedHashMap<String, String> attrMap = new LinkedHashMap<String, String>();
    if (StringUtils.isNotEmpty(this.docID)) {
        attrMap.put("documentID", this.docID);
    }
    attrMap.put("xmlns", Ink.INKML_NAMESPACE);
    writer.writeStartTag("ink", attrMap);
    writer.incrementTagLevel();
    // write definitions
    this.definitions.writeXML(writer);

    // write Trace data list
    final Iterator<TraceDataElement> iterator = this.traceDataList.iterator();
    while (iterator.hasNext()) {
        final TraceDataElement data = iterator.next();
        data.writeXML(writer);
    }
    writer.decrementTagLevel();
    writer.writeEndTag("ink");
}