Example usage for java.util LinkedHashMap put

List of usage examples for java.util LinkedHashMap put

Introduction

On this page you can find usage examples for java.util LinkedHashMap put.

Prototype

V put(K key, V value);

Document

Associates the specified value with the specified key in this map (optional operation).
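
The short stand-alone sketch below is not taken from the projects listed under Usage; it is a minimal illustration (the class name LinkedHashMapPutExample is invented for this page) of two properties of put on a LinkedHashMap: iteration follows insertion order, and put returns the value previously associated with the key, or null if the key was absent.

import java.util.LinkedHashMap;
import java.util.Map;

public class LinkedHashMapPutExample {
    public static void main(String[] args) {
        Map<String, Integer> map = new LinkedHashMap<>();

        // put returns null when the key has no previous mapping
        Integer previous = map.put("alpha", 1);
        System.out.println(previous);             // null: no previous mapping for "alpha"
        map.put("beta", 2);
        map.put("gamma", 3);

        // putting an existing key replaces the value and returns the old one;
        // re-inserting a key does not change its position in the iteration order
        previous = map.put("alpha", 10);
        System.out.println(previous);             // 1

        // insertion order is preserved: alpha=10, beta=2, gamma=3
        for (Map.Entry<String, Integer> e : map.entrySet()) {
            System.out.println(e.getKey() + "=" + e.getValue());
        }
    }
}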

Usage

From source file:com.streamsets.pipeline.stage.processor.xmlflattener.TestXMLFlatteningProcessor.java

private LinkedHashMap<String, Field> createExpectedRecord(String prefix, String id, String offset,
        String delimStr, String attrStr, String outputField, boolean addAttrs, boolean addNS) {
    LinkedHashMap<String, Field> fields = new LinkedHashMap<>();
    String baseName = prefix + "contact" + offset + delimStr;

    if (addAttrs) {
        fields.put("contact" + attrStr + "type", Field.create("person"));
        fields.put(baseName + "name" + attrStr + "type", Field.create("maiden"));
    }

    if (addNS) {
        fields.put(baseName + "name" + attrStr + "xmlns", Field.create("http://blah.com/blah.xml"));
    }
    fields.put(baseName + "name", Field.create("NAME" + id));
    fields.put(baseName + "phone(0)", Field.create("(111)111-1111" + id));
    fields.put(baseName + "phone(1)", Field.create("(222)222-2222" + id));

    if (!StringUtils.isEmpty(outputField)) {
        LinkedHashMap<String, Field> newRoot = new LinkedHashMap<>();
        newRoot.put(outputField, Field.create(Field.Type.MAP, fields));
        fields = newRoot;
    }

    return fields;
}

From source file:org.dataconservancy.packaging.tool.impl.PackageStateSerializationIT.java

/**
 * Ensures that unicode characters can be round-tripped through package state serialization using the platform
 * default encoding.
 *
 * @throws Exception
 */
@Test
public void testPlatformEncodingRoundTrip() throws Exception {
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    String unicodeString = "S\u00EDSe\u00F1or!";

    // Package name with a unicode string
    state.setPackageName(unicodeString);

    // Package metadata with a unicode string
    LinkedHashMap<String, List<String>> packageMetadata = new LinkedHashMap<>();
    packageMetadata.put("foo", Collections.singletonList(unicodeString));
    state.setPackageMetadataList(packageMetadata);

    // A String user-defined property value with a unicode string
    Map<URI, List<Property>> userProps = new HashMap<>();
    PropertyType type = new PropertyType();
    type.setPropertyValueType(PropertyValueType.STRING);
    Property property = new Property(type);
    property.setStringValue(unicodeString);
    userProps.put(URI.create("http://a/uri"), Collections.singletonList(property));
    state.setUserSpecifiedProperties(userProps);

    // A IPM node with a unicode string
    Model ipm = ModelFactory.createDefaultModel();
    Statement s = ipm.createStatement(ipm.createResource("foo:s"), ipm.createProperty("foo:p"),
            ipm.createResource(unicodeString));
    ipm.add(s);
    state.setPackageTree(ipm);

    // A domain object with a unicode string
    Model objects = ModelFactory.createDefaultModel();
    s = objects.createStatement(objects.createResource("bar:s"), objects.createProperty("bar:p"),
            objects.createResource(unicodeString));
    objects.add(s);
    state.setDomainObjectRDF(objects);

    // Serialize the state.
    underTest.serialize(state, sink);

    // Deserialize it to a new PackageState instance
    PackageState deserializedPs = new PackageState();
    underTest.deserialize(deserializedPs, new ByteArrayInputStream(sink.toByteArray()));

    // Make sure our characters are there.
    assertEquals(unicodeString, deserializedPs.getPackageName());
    assertEquals(unicodeString, deserializedPs.getPackageMetadataList().get("foo").get(0));
    assertEquals(unicodeString, deserializedPs.getUserSpecifiedProperties().get(URI.create("http://a/uri"))
            .get(0).getStringValue());

    Model objectsPrime = deserializedPs.getDomainObjectRDF();
    assertFalse(objects == objectsPrime);
    assertTrue(objectsPrime.listObjectsOfProperty(objectsPrime.createProperty("bar:p")).next().toString()
            .endsWith(unicodeString));

    Model ipmPrime = deserializedPs.getPackageTree();
    assertFalse(ipm == ipmPrime);
    assertTrue(ipmPrime.listObjectsOfProperty(ipmPrime.createProperty("foo:p")).next().toString()
            .endsWith(unicodeString));
}

From source file:com.espertech.esper.event.EventTypeUtility.java

public static LinkedHashMap<String, Object> validateObjectArrayDef(String[] propertyNames,
        Object[] propertyTypes) {
    if (propertyNames.length != propertyTypes.length) {
        throw new ConfigurationException("Number of property names and property types do not match, found "
                + propertyNames.length + " property names and " + propertyTypes.length + " property types");
    }

    // validate property names for no-duplicates
    Set<String> propertyNamesSet = new HashSet<String>();
    LinkedHashMap<String, Object> propertyTypesMap = new LinkedHashMap<String, Object>();
    for (int i = 0; i < propertyNames.length; i++) {
        String propertyName = propertyNames[i];
        if (propertyNamesSet.contains(propertyName)) { // duplicate prop check
            throw new ConfigurationException(
                    "Property '" + propertyName + "' is listed twice in the type definition");
        }
        propertyNamesSet.add(propertyName);
        propertyTypesMap.put(propertyName, propertyTypes[i]);
    }
    return propertyTypesMap;
}

From source file:com.nextdoor.bender.ipc.s3.S3TransporterTest.java

@Test
public void testPartitioned() throws TransportException, IllegalStateException, IOException {
    /*
     * Create mock client, requests, and replies
     */
    AmazonS3Client mockClient = getMockClient();

    /*
     * Fill buffer with mock data
     */
    S3TransportBuffer buffer = new S3TransportBuffer(1000, false, new S3TransportSerializer());
    InternalEvent mockIevent = mock(InternalEvent.class);
    doReturn("foo").when(mockIevent).getSerialized();

    /*
     * Create transport
     */
    Map<String, MultiPartUpload> multiPartUploads = new HashMap<String, MultiPartUpload>(0);
    S3Transport transport = new S3Transport(mockClient, "bucket", "basepath", false, multiPartUploads);

    /*
     * Do actual test
     */
    buffer.add(mockIevent);
    LinkedHashMap<String, String> partitions = new LinkedHashMap<String, String>();
    partitions.put(S3Transport.FILENAME_KEY, "a_filename");
    partitions.put("day", "01");
    partitions.put("hour", "23");

    ArgumentCaptor<UploadPartRequest> argument = ArgumentCaptor.forClass(UploadPartRequest.class);
    transport.sendBatch(buffer, partitions, new TestContext());
    verify(mockClient).uploadPart(argument.capture());

    /*
     * Check results
     */
    assertEquals("bucket", argument.getValue().getBucketName());
    assertEquals("basepath/day=01/hour=23/a_filename", argument.getValue().getKey());
    assertEquals(1, argument.getValue().getPartNumber());
    assertEquals(4, argument.getValue().getPartSize()); // foo\n
    assertEquals("123", argument.getValue().getUploadId());
}

From source file:com.nextdoor.bender.ipc.s3.S3TransporterTest.java

@Test
public void testCompressedPartitioned() throws TransportException, IllegalStateException, IOException {
    /*
     * Create mock client, requests, and replies
     */
    AmazonS3Client mockClient = getMockClient();

    /*
     * Fill buffer with mock data
     */
    S3TransportBuffer buffer = new S3TransportBuffer(1000, true, new S3TransportSerializer());
    InternalEvent mockIevent = mock(InternalEvent.class);
    doReturn("foo").when(mockIevent).getSerialized();

    /*
     * Create transport
     */
    Map<String, MultiPartUpload> multiPartUploads = new HashMap<String, MultiPartUpload>(0);
    S3Transport transport = new S3Transport(mockClient, "bucket", "basepath", true, multiPartUploads);

    /*
     * Do actual test
     */
    buffer.add(mockIevent);
    LinkedHashMap<String, String> partitions = new LinkedHashMap<String, String>();
    partitions.put(S3Transport.FILENAME_KEY, "a_filename");
    partitions.put("day", "01");
    partitions.put("hour", "23");

    ArgumentCaptor<UploadPartRequest> argument = ArgumentCaptor.forClass(UploadPartRequest.class);
    transport.sendBatch(buffer, partitions, new TestContext());
    verify(mockClient).uploadPart(argument.capture());

    /*
     * Check results
     */
    assertEquals("bucket", argument.getValue().getBucketName());
    assertEquals("basepath/day=01/hour=23/a_filename.bz2", argument.getValue().getKey());
    assertEquals(1, argument.getValue().getPartNumber());
    assertEquals(3, argument.getValue().getPartSize());
    assertEquals("123", argument.getValue().getUploadId());
}

From source file:net.jradius.webservice.WebServiceListener.java

private Map<String, String> getHeaders(DataInputStream reader) throws IOException {
    LinkedHashMap<String, String> map = new LinkedHashMap<String, String>();
    String line;
    do {
        line = reader.readLine();
        if (line != null && line.trim().length() > 0) {
            String[] parts = line.split(":", 2);
            if (parts.length == 2) {
                map.put(parts[0].toLowerCase().trim(), parts[1].trim());
            } else
                break;
        } else
            break;
    } while (true);

    return map;
}

From source file:com.nextdoor.bender.ipc.s3.S3TransporterTest.java

@Test(expected = TransportException.class)
public void testAmazonClientException() throws TransportException, IllegalStateException, IOException {
    /*
     * Create mock client, requests, and replies
     */
    AmazonS3Client mockClient = mock(AmazonS3Client.class);
    UploadPartResult uploadResult = new UploadPartResult();
    uploadResult.setETag("foo");
    doThrow(new AmazonClientException("expected")).when(mockClient).uploadPart(any(UploadPartRequest.class));

    InitiateMultipartUploadResult initUploadResult = new InitiateMultipartUploadResult();
    initUploadResult.setUploadId("123");
    doReturn(initUploadResult).when(mockClient)
            .initiateMultipartUpload(any(InitiateMultipartUploadRequest.class));

    /*
     * Fill buffer with mock data
     */
    S3TransportBuffer buffer = new S3TransportBuffer(1000, false, new S3TransportSerializer());
    InternalEvent mockIevent = mock(InternalEvent.class);
    doReturn("foo").when(mockIevent).getSerialized();

    /*
     * Create transport
     */
    Map<String, MultiPartUpload> multiPartUploads = new HashMap<String, MultiPartUpload>(0);
    S3Transport transport = new S3Transport(mockClient, "bucket", "basepath", false, multiPartUploads);

    /*
     * Do actual test
     */
    buffer.add(mockIevent);
    LinkedHashMap<String, String> partitions = new LinkedHashMap<String, String>();
    partitions.put(S3Transport.FILENAME_KEY, "a_filename");

    ArgumentCaptor<UploadPartRequest> argument = ArgumentCaptor.forClass(UploadPartRequest.class);
    try {
        transport.sendBatch(buffer, partitions, new TestContext());
    } catch (Exception e) {
        assertEquals(e.getCause().getClass(), AmazonClientException.class);
        throw e;
    }
}

From source file:com.nextdoor.bender.ipc.s3.S3TransporterTest.java

@Test
public void testMultipleUploads() throws TransportException, IllegalStateException, IOException {
    /*
     * Create mock client, requests, and replies
     */
    AmazonS3Client mockClient = getMockClient();

    /*
     * Fill buffer with mock data
     */
    S3TransportBuffer buffer1 = new S3TransportBuffer(1000, false, new S3TransportSerializer());
    S3TransportBuffer buffer2 = new S3TransportBuffer(1000, false, new S3TransportSerializer());
    InternalEvent mockIevent = mock(InternalEvent.class);
    doReturn("foo").doReturn("bar1").when(mockIevent).getSerialized();
    buffer1.add(mockIevent);
    buffer2.add(mockIevent);

    /*
     * Create transport
     */
    Map<String, MultiPartUpload> multiPartUploads = new HashMap<String, MultiPartUpload>(0);
    S3Transport transport = new S3Transport(mockClient, "bucket", "basepath", false, multiPartUploads);

    /*
     * Do actual test
     */
    LinkedHashMap<String, String> partitions = new LinkedHashMap<String, String>();
    partitions.put(S3Transport.FILENAME_KEY, "a_filename");
    ArgumentCaptor<UploadPartRequest> argument = ArgumentCaptor.forClass(UploadPartRequest.class);

    transport.sendBatch(buffer1, partitions, new TestContext());
    transport.sendBatch(buffer2, partitions, new TestContext());

    verify(mockClient, times(2)).uploadPart(argument.capture());

    List<UploadPartRequest> arguments = argument.getAllValues();

    assertEquals(1, arguments.get(0).getPartNumber());
    assertEquals(4, arguments.get(0).getPartSize()); // foo\n
    assertEquals("123", arguments.get(0).getUploadId());

    assertEquals(2, arguments.get(1).getPartNumber());
    assertEquals(5, arguments.get(1).getPartSize()); // bar1\n
    assertEquals("123", arguments.get(1).getUploadId());
}

From source file:asu.edu.msse.gpeddabu.moviedescriptions.MovieLibrary.java

MovieLibrary(String jsonString) {
    if (movieMap == null) {
        movieMap = new LinkedHashMap<>();
        JSONObject jsonObj;
        LinkedHashMap<String, JSONObject> movieDescr;
        try {
            jsonObj = new JSONObject(jsonString);
            Iterator<String> iter = jsonObj.keys();
            while (iter.hasNext()) {
                String name = iter.next();
                JSONObject movie = (JSONObject) jsonObj.get(name);
                if (movieMap.containsKey(movie.get("Genre").toString())) {
                    movieDescr = movieMap.get(movie.get("Genre").toString());
                    movieDescr.put(name, movie);
                } else {
                    movieDescr = new LinkedHashMap<String, JSONObject>();
                    movieDescr.put(name, movie);
                    movieMap.put(movie.get("Genre").toString(), movieDescr);
                }

            }
        } catch (Exception e) {
            android.util.Log.e("MovieLibrary", "Error converting JSON string to JSON object", e);
        }
    }
}

From source file:com.logsniffer.reader.support.AbstractPatternLineReader.java

@Override
public LinkedHashMap<String, FieldBaseTypes> getFieldTypes() throws FormatException {
    final LinkedHashMap<String, FieldBaseTypes> fields = new LinkedHashMap<String, FieldBaseTypes>();
    fields.put(LogEntry.FIELD_RAW_CONTENT, FieldBaseTypes.STRING);
    return fields;
}