List of usage examples for java.util LinkedHashMap put
V put(K key, V value);
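put behaves as in HashMap: it maps the key to the value and returns the previous value (or null if the key was absent). The difference is that a LinkedHashMap also remembers the order in which keys were first inserted, so iteration follows that order. A minimal, self-contained sketch (class and key names are illustrative only):

import java.util.LinkedHashMap;
import java.util.Map;

public class PutOrderDemo {
    public static void main(String[] args) {
        Map<String, Integer> counts = new LinkedHashMap<>();
        counts.put("first", 1);
        counts.put("second", 2);
        counts.put("third", 3);

        // Re-putting an existing key replaces the value and returns the old one,
        // but does NOT move the key: insertion order is kept.
        Integer previous = counts.put("first", 10); // previous == 1

        // Prints first=10, second=2, third=3 in that order.
        counts.forEach((k, v) -> System.out.println(k + "=" + v));
    }
}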
From source file:com.nextdoor.bender.ipc.s3.S3TransporterTest.java
@Test
public void testCompressedBuffer() throws TransportException, IllegalStateException, IOException {
    /*
     * Create mock client, requests, and replies
     */
    AmazonS3Client mockClient = getMockClient();

    /*
     * Capture the InputStream into a ByteArrayOutputStream before the Transport thread closes the
     * InputStream and makes it unavailable for reading.
     */
    ByteArrayOutputStream captured = new ByteArrayOutputStream();
    Answer answer = new Answer() {
        @Override
        public Object answer(InvocationOnMock invocation) throws Throwable {
            UploadPartRequest req = invocation.getArgumentAt(0, UploadPartRequest.class);
            captured.write(req.getInputStream());
            return new UploadPartResult();
        }
    };
    Mockito.doAnswer(answer).when(mockClient).uploadPart(any(UploadPartRequest.class));

    /*
     * Fill buffer with mock data
     */
    S3TransportBuffer buffer = new S3TransportBuffer(1000, true, new S3TransportSerializer());
    InternalEvent mockIevent = mock(InternalEvent.class);
    doReturn("foo").when(mockIevent).getSerialized();

    /*
     * Create transport
     */
    Map<String, MultiPartUpload> multiPartUploads = new HashMap<String, MultiPartUpload>(0);
    S3Transport transport = new S3Transport(mockClient, "bucket", "basepath", true, multiPartUploads);

    /*
     * Do actual test
     */
    buffer.add(mockIevent);
    LinkedHashMap<String, String> partitions = new LinkedHashMap<String, String>();
    partitions.put(S3Transport.FILENAME_KEY, "a_filename");
    ArgumentCaptor<UploadPartRequest> argument = ArgumentCaptor.forClass(UploadPartRequest.class);

    buffer.close();
    transport.sendBatch(buffer, partitions, new TestContext());
    verify(mockClient).uploadPart(argument.capture());

    /*
     * Check results
     */
    assertEquals("bucket", argument.getValue().getBucketName());
    assertEquals("basepath/a_filename.bz2", argument.getValue().getKey());
    assertEquals(1, argument.getValue().getPartNumber());
    assertEquals(40, argument.getValue().getPartSize());
    assertEquals("123", argument.getValue().getUploadId());

    /*
     * Convert the actual InputStream from the client into a ByteArrayOutputStream which can be read
     * and verified.
     */
    byte[] actualBytes = captured.toByteArray();
    byte[] expectedBytes = { 66, 90, 104, 57, 49, 65, 89, 38, 83, 89, 118, -10, -77, -27, 0, 0, 0, -63, 0, 0,
            16, 1, 0, -96, 0, 48, -52, 12, -62, 12, 46, -28, -118, 112, -95, 32, -19, -19, 103, -54 };
    assertArrayEquals(expectedBytes, actualBytes);
}
From source file:com.nextdoor.bender.ipc.s3.S3TransporterTest.java
@Test
public void testCompressed() throws TransportException, IllegalStateException, IOException {
    /*
     * Create mock client, requests, and replies
     */
    AmazonS3Client mockClient = getMockClient();

    /*
     * Capture the InputStream into a ByteArrayOutputStream before the Transport thread closes the
     * InputStream and makes it unavailable for reading.
     */
    ByteArrayOutputStream captured = new ByteArrayOutputStream();
    Answer answer = new Answer() {
        @Override
        public Object answer(InvocationOnMock invocation) throws Throwable {
            UploadPartRequest req = invocation.getArgumentAt(0, UploadPartRequest.class);
            captured.write(req.getInputStream());
            return new UploadPartResult();
        }
    };
    Mockito.doAnswer(answer).when(mockClient).uploadPart(any(UploadPartRequest.class));

    /*
     * Fill buffer with mock data
     */
    S3TransportBuffer buffer = new S3TransportBuffer(1000, false, new S3TransportSerializer());
    InternalEvent mockIevent = mock(InternalEvent.class);
    doReturn("foo").when(mockIevent).getSerialized();

    /*
     * Create transport
     */
    Map<String, MultiPartUpload> multiPartUploads = new HashMap<String, MultiPartUpload>(0);
    S3Transport transport = new S3Transport(mockClient, "bucket", "basepath", true, multiPartUploads);

    /*
     * Do actual test
     */
    buffer.add(mockIevent);
    LinkedHashMap<String, String> partitions = new LinkedHashMap<String, String>();
    partitions.put(S3Transport.FILENAME_KEY, "a_filename");
    ArgumentCaptor<UploadPartRequest> argument = ArgumentCaptor.forClass(UploadPartRequest.class);

    buffer.close();
    transport.sendBatch(buffer, partitions, new TestContext());
    verify(mockClient).uploadPart(argument.capture());

    /*
     * Check results
     */
    assertEquals("bucket", argument.getValue().getBucketName());
    assertEquals("basepath/a_filename.bz2", argument.getValue().getKey());
    assertEquals(1, argument.getValue().getPartNumber());
    assertEquals(40, argument.getValue().getPartSize());
    assertEquals("123", argument.getValue().getUploadId());

    /*
     * Convert the actual InputStream from the client into a ByteArrayOutputStream which can be read
     * and verified.
     */
    byte[] actualBytes = captured.toByteArray();
    byte[] expectedBytes = { 66, 90, 104, 57, 49, 65, 89, 38, 83, 89, 118, -10, -77, -27, 0, 0, 0, -63, 0, 0,
            16, 1, 0, -96, 0, 48, -52, 12, -62, 12, 46, -28, -118, 112, -95, 32, -19, -19, 103, -54 };
    assertArrayEquals(expectedBytes, actualBytes);
}
From source file:com.opengamma.analytics.financial.provider.sensitivity.multicurve.MultipleCurrencyParameterSensitivity.java
/**
 * Create a copy of the sensitivity and add a given sensitivity to it.
 * @param other The sensitivity to add.
 * @return The total sensitivity.
 */
public MultipleCurrencyParameterSensitivity plus(final MultipleCurrencyParameterSensitivity other) {
    ArgumentChecker.notNull(other, "Sensitivity to add");
    final MatrixAlgebra algebra = MatrixAlgebraFactory.COMMONS_ALGEBRA;
    final LinkedHashMap<Pair<String, Currency>, DoubleMatrix1D> result = new LinkedHashMap<>();
    result.putAll(_sensitivity);
    for (final Map.Entry<Pair<String, Currency>, DoubleMatrix1D> entry : other.getSensitivities().entrySet()) {
        final Pair<String, Currency> nameCcy = entry.getKey();
        if (result.containsKey(nameCcy)) {
            result.put(nameCcy, (DoubleMatrix1D) algebra.add(result.get(nameCcy), entry.getValue()));
        } else {
            result.put(nameCcy, entry.getValue());
        }
    }
    return new MultipleCurrencyParameterSensitivity(result);
}
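The containsKey/put branch above is the classic "put or combine" pattern for merging two insertion-ordered maps. A stripped-down sketch of the same idea, using plain double values instead of DoubleMatrix1D and Java's Map.merge as the combiner (class and key names are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;

public class MergeSketch {
    // Returns a new insertion-ordered map containing a + b, summing values on key collisions.
    static LinkedHashMap<String, Double> plus(Map<String, Double> a, Map<String, Double> b) {
        LinkedHashMap<String, Double> result = new LinkedHashMap<>(a);
        // merge() puts the value if the key is absent, otherwise applies the combiner.
        b.forEach((key, value) -> result.merge(key, value, Double::sum));
        return result;
    }

    public static void main(String[] args) {
        LinkedHashMap<String, Double> a = new LinkedHashMap<>();
        a.put("USD curve", 1.5);
        a.put("EUR curve", 2.0);
        LinkedHashMap<String, Double> b = new LinkedHashMap<>();
        b.put("EUR curve", 0.5);
        b.put("GBP curve", 3.0);
        System.out.println(plus(a, b)); // {USD curve=1.5, EUR curve=2.5, GBP curve=3.0}
    }
}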
From source file:hydrograph.ui.propertywindow.propertydialog.PropertyDialog.java
private void savePropertiesInComponentModel(AbstractWidget eltWidget, LinkedHashMap<String, Object> properties) {
    LinkedHashMap<String, Object> tempPropert = properties;
    LinkedHashMap<String, Object> componentConfigurationProperties = componentProperties
            .getComponentConfigurationProperties();
    for (String propName : tempPropert.keySet()) {
        componentConfigurationProperties.put(propName, tempPropert.get(propName));
    }
}
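The keySet loop above applies put to every entry of the source map, so it is equivalent to a single putAll call: existing keys in the target keep their position and only have their values overwritten, while new keys are appended. A minimal sketch of that equivalence (names are illustrative):

import java.util.LinkedHashMap;

public class PutAllSketch {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> target = new LinkedHashMap<>();
        target.put("name", "old");

        LinkedHashMap<String, Object> props = new LinkedHashMap<>();
        props.put("name", "new");
        props.put("delimiter", ",");

        // Same effect as looping over props.keySet() and calling target.put(...) per key.
        target.putAll(props);

        System.out.println(target); // {name=new, delimiter=,}
    }
}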
From source file:com.opengamma.analytics.financial.curve.CurveConstructionXCcyTest.java
private static Pair<YieldCurveBundle, CurveBuildingBlockBundle> makeCurves(
        final InstrumentDefinition<?>[][][] definitions, GeneratorYDCurve[][] curveGenerators,
        String[][] curveNames, YieldCurveBundle knownData,
        final AbstractInstrumentDerivativeVisitor<YieldCurveBundle, Double> calculator,
        final AbstractInstrumentDerivativeVisitor<YieldCurveBundle, InterestRateCurveSensitivity> sensitivityCalculator,
        boolean withToday, int block) {
    int nbBlocks = curveGenerators.length;
    YieldCurveBundle knownSoFarData = knownData.copy();
    List<InstrumentDerivative> instrumentsSoFar = new ArrayList<InstrumentDerivative>();
    LinkedHashMap<String, GeneratorYDCurve> generatorsSoFar = new LinkedHashMap<String, GeneratorYDCurve>();
    LinkedHashMap<String, Pair<CurveBuildingBlock, DoubleMatrix2D>> unitBundleSoFar = new LinkedHashMap<String, Pair<CurveBuildingBlock, DoubleMatrix2D>>();
    List<Double> parametersSoFar = new ArrayList<Double>();
    LinkedHashMap<String, Pair<Integer, Integer>> unitMap = new LinkedHashMap<String, Pair<Integer, Integer>>();
    int start = 0;
    for (int loopunit = 0; loopunit < nbBlocks; loopunit++) {
        int startBlock = 0;
        InstrumentDerivative[] instruments = convert(curveNames, definitions[loopunit], loopunit, withToday, block);
        instrumentsSoFar.addAll(Arrays.asList(instruments));
        InstrumentDerivative[] instrumentsSoFarArray = instrumentsSoFar.toArray(new InstrumentDerivative[0]);
        double[] initGuess = initialGuess(definitions[loopunit]);
        LinkedHashMap<String, GeneratorYDCurve> gen = new LinkedHashMap<String, GeneratorYDCurve>();
        int[] nbIns = new int[curveGenerators[loopunit].length];
        for (int loopcurve = 0; loopcurve < curveGenerators[loopunit].length; loopcurve++) {
            nbIns[loopcurve] = definitions[loopunit][loopcurve].length;
            InstrumentDerivative[] insCurve = new InstrumentDerivative[nbIns[loopcurve]];
            System.arraycopy(instruments, startBlock, insCurve, 0, nbIns[loopcurve]);
            GeneratorYDCurve tmp = curveGenerators[loopunit][loopcurve].finalGenerator(insCurve);
            gen.put(curveNames[loopunit][loopcurve], tmp);
            generatorsSoFar.put(curveNames[loopunit][loopcurve], tmp);
            unitMap.put(curveNames[loopunit][loopcurve],
                    new ObjectsPair<Integer, Integer>(start + startBlock, nbIns[loopcurve]));
            startBlock += nbIns[loopcurve];
        }
        Pair<YieldCurveBundle, Double[]> unitCal = makeUnit(instruments, initGuess, gen, knownSoFarData,
                calculator, sensitivityCalculator);
        parametersSoFar.addAll(Arrays.asList(unitCal.getSecond()));
        DoubleMatrix2D[] mat = makeCurveMatrix(instrumentsSoFarArray, generatorsSoFar, start, nbIns,
                parametersSoFar.toArray(new Double[0]), knownData, sensitivityCalculator);
        for (int loopcurve = 0; loopcurve < curveGenerators[loopunit].length; loopcurve++) {
            unitBundleSoFar.put(curveNames[loopunit][loopcurve],
                    new ObjectsPair<CurveBuildingBlock, DoubleMatrix2D>(new CurveBuildingBlock(unitMap), mat[loopcurve]));
        }
        knownSoFarData.addAll(unitCal.getFirst());
        start = start + startBlock;
    }
    return new ObjectsPair<YieldCurveBundle, CurveBuildingBlockBundle>(knownSoFarData,
            new CurveBuildingBlockBundle(unitBundleSoFar));
}
From source file:com.opengamma.analytics.math.curve.InterpolatedCurveBuildingFunction.java
public LinkedHashMap<String, InterpolatedDoublesCurve> evaluate(DoubleMatrix1D x) {
    Validate.notNull(x, "null data x");
    Validate.isTrue(_nNodes == x.getNumberOfElements(), "x wrong length");
    LinkedHashMap<String, InterpolatedDoublesCurve> res = new LinkedHashMap<String, InterpolatedDoublesCurve>();
    int index = 0;
    for (final String name : _interpolators.keySet()) {
        final Interpolator1D interpolator = _interpolators.get(name);
        final double[] nodes = _knotPoints.get(name);
        final double[] values = Arrays.copyOfRange(x.getData(), index, index + nodes.length);
        index += nodes.length;
        InterpolatedDoublesCurve curve = InterpolatedDoublesCurve.from(nodes, values, interpolator);
        res.put(name, curve);
    }
    return res;
}
From source file:com.espertech.esper.epl.join.plan.NStreamOuterQueryPlanBuilder.java
private static void addNotYetNavigated(int streamNo, int numStreams,
        LinkedHashMap<Integer, int[]> substreamsPerStream, NStreamQueryPlanBuilder.BestChainResult bestChain) {
    // sum up all substreams (the query plan for each stream: nested iteration or cardinal)
    Set<Integer> streams = new HashSet<Integer>();
    streams.add(streamNo);
    recursiveAdd(streamNo, streamNo, substreamsPerStream, streams, false);

    // we are done, all have navigated
    if (streams.size() == numStreams) {
        return;
    }

    int previous = streamNo;
    for (int stream : bestChain.getChain()) {
        if (streams.contains(stream)) {
            previous = stream;
            continue;
        }

        // add node as a nested join to the previous stream
        int[] substreams = substreamsPerStream.get(previous);
        if (substreams == null) {
            substreams = new int[0];
        }
        int[] added = CollectionUtil.addValue(substreams, stream);
        substreamsPerStream.put(previous, added);

        if (!substreamsPerStream.containsKey(stream)) {
            substreamsPerStream.put(stream, new int[0]);
        }
        previous = stream;
    }
}
From source file:org.lokra.seaweedfs.core.FileTemplate.java
/**
 * Save files by stream map.
 *
 * @param streamMap   Map of file name and file stream.
 * @param contentType File content type.
 * @return Files status.
 * @throws IOException The HTTP connection fails or the server responds with an error message.
 */
public LinkedHashMap<String, FileHandleStatus> saveFilesByStreamMap(
        LinkedHashMap<String, InputStream> streamMap, ContentType contentType) throws IOException {
    // Assign file key
    final AssignFileKeyParams params = new AssignFileKeyParams(assignFileKeyParams.getReplication(),
            streamMap.size(), assignFileKeyParams.getDataCenter(), assignFileKeyParams.getTtl(),
            assignFileKeyParams.getCollection());
    final AssignFileKeyResult assignFileKeyResult = masterWrapper.assignFileKey(params);
    String uploadUrl;
    if (usingPublicUrl)
        uploadUrl = assignFileKeyResult.getPublicUrl();
    else
        uploadUrl = assignFileKeyResult.getUrl();

    // Upload file
    LinkedHashMap<String, FileHandleStatus> resultMap = new LinkedHashMap<String, FileHandleStatus>();
    int index = 0;
    for (String fileName : streamMap.keySet()) {
        if (index == 0)
            resultMap.put(fileName,
                    new FileHandleStatus(assignFileKeyResult.getFid(),
                            volumeWrapper.uploadFile(uploadUrl, assignFileKeyResult.getFid(), fileName,
                                    streamMap.get(fileName), timeToLive, contentType)));
        else
            resultMap.put(fileName,
                    new FileHandleStatus(assignFileKeyResult.getFid() + "_" + String.valueOf(index),
                            volumeWrapper.uploadFile(uploadUrl,
                                    assignFileKeyResult.getFid() + "_" + String.valueOf(index), fileName,
                                    streamMap.get(fileName), timeToLive, contentType)));
        index++;
    }
    return resultMap;
}
From source file:org.dataconservancy.packaging.tool.impl.PackageStateSerializationIT.java
/**
 * As configured in production, the {@link AnnotationDrivenPackageStateSerializer} should be encoding characters
 * using UTF-8, no matter what the platform default is.
 *
 * @throws Exception
 */
@Test
public void testUtf8Encoding() throws Exception {
    PackageState deserializedState = null;
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    String unicodeString = "S\u00EDSe\u00F1or!";
    byte[] unicodeBytes = unicodeString.getBytes(Charset.forName("UTF-8"));

    // We will not configure this to archive, so that the serialized stream will
    // not be placed in a zip entry; this makes searching through the sink for a
    // byte sequence more robust.
    // TODO this test method should really be somewhere else; or the PackageStateSerializer should expose a setArchive(boolean) method.
    if (!(underTest instanceof AnnotationDrivenPackageStateSerializer)) {
        fail("Expected an instance of AnnotationDrivenPackageStateSerializer");
    }
    ((AnnotationDrivenPackageStateSerializer) underTest).setArchive(false);

    // Package name with a unicode string
    state.setPackageName(unicodeString);
    underTest.serialize(state, StreamId.PACKAGE_NAME, sink);
    assertTrue(contains(unicodeBytes, sink));
    deserializedState = new PackageState();
    underTest.deserialize(deserializedState, StreamId.PACKAGE_NAME,
            new ByteArrayInputStream(sink.toByteArray()));
    assertEquals(unicodeString, deserializedState.getPackageName());
    sink.reset();
    deserializedState = null;

    // Package metadata with a unicode string
    LinkedHashMap<String, List<String>> packageMetadata = new LinkedHashMap<>();
    packageMetadata.put("foo", Collections.singletonList(unicodeString));
    state.setPackageMetadataList(packageMetadata);
    underTest.serialize(state, StreamId.PACKAGE_METADATA, sink);
    assertTrue(contains(unicodeBytes, sink));
    deserializedState = new PackageState();
    underTest.deserialize(deserializedState, StreamId.PACKAGE_METADATA,
            new ByteArrayInputStream(sink.toByteArray()));
    assertEquals(unicodeString, deserializedState.getPackageMetadataList().get("foo").get(0));
    sink.reset();
    deserializedState = null;

    // A String user-defined property value with a unicode string
    Map<URI, List<Property>> userProps = new HashMap<>();
    PropertyType type = new PropertyType();
    type.setPropertyValueType(PropertyValueType.STRING);
    Property property = new Property(type);
    property.setStringValue(unicodeString);
    userProps.put(URI.create("http://a/uri"), Collections.singletonList(property));
    state.setUserSpecifiedProperties(userProps);
    underTest.serialize(state, StreamId.USER_SPECIFIED_PROPERTIES, sink);
    assertTrue(contains(unicodeBytes, sink));
    deserializedState = new PackageState();
    underTest.deserialize(deserializedState, StreamId.USER_SPECIFIED_PROPERTIES,
            new ByteArrayInputStream(sink.toByteArray()));
    assertEquals(unicodeString, deserializedState.getUserSpecifiedProperties().get(URI.create("http://a/uri"))
            .get(0).getStringValue());
    sink.reset();
    deserializedState = null;

    // An IPM node with a unicode string
    Model ipm = ModelFactory.createDefaultModel();
    Statement s = ipm.createStatement(ipm.createResource("foo:s"), ipm.createProperty("foo:p"),
            ipm.createResource(unicodeString));
    ipm.add(s);
    state.setPackageTree(ipm);
    underTest.serialize(state, StreamId.PACKAGE_TREE, sink);
    assertTrue(contains(unicodeBytes, sink));
    deserializedState = new PackageState();
    underTest.deserialize(deserializedState, StreamId.PACKAGE_TREE,
            new ByteArrayInputStream(sink.toByteArray()));
    assertTrue(deserializedState.getPackageTree().listObjectsOfProperty(ResourceFactory.createProperty("foo:p"))
            .next().toString().endsWith(unicodeString));
    sink.reset();
    deserializedState = null;

    // A domain object with a unicode string
    Model objects = ModelFactory.createDefaultModel();
    s = objects.createStatement(objects.createResource("bar:s"), objects.createProperty("bar:p"),
            objects.createResource(unicodeString));
    objects.add(s);
    state.setDomainObjectRDF(objects);
    underTest.serialize(state, StreamId.DOMAIN_OBJECTS, sink);
    assertTrue(contains(unicodeBytes, sink));
    deserializedState = new PackageState();
    underTest.deserialize(deserializedState, StreamId.DOMAIN_OBJECTS,
            new ByteArrayInputStream(sink.toByteArray()));
    assertTrue(deserializedState.getDomainObjectRDF()
            .listObjectsOfProperty(ResourceFactory.createProperty("bar:p")).next().toString()
            .endsWith(unicodeString));
    sink.reset();
    deserializedState = null;
}
From source file:com.spankingrpgs.scarletmoon.loader.EventLoader.java
private LinkedHashMap<Predicate<GameState>, String> hydrateAutomatedChoices(JsonNode automaticChoices) {
    if (automaticChoices == null) {
        return new LinkedHashMap<>();
    }
    Iterator<String> predicateStrings = automaticChoices.fieldNames();
    LinkedHashMap<Predicate<GameState>, String> hydratedAutomatedChoices = new LinkedHashMap<>();
    while (predicateStrings.hasNext()) {
        String predicateString = predicateStrings.next();
        Predicate<GameState> predicate = parsePredicateString(predicateString);
        hydratedAutomatedChoices.put(predicate, automaticChoices.get(predicateString).asText());
    }
    return hydratedAutomatedChoices;
}
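Because the choices are stored in a LinkedHashMap, they can later be evaluated in exactly the order they appeared in the JSON, for example by picking the first predicate that matches the current state. A small sketch of that lookup, with a plain Integer standing in for GameState and placeholder predicates (all names here are illustrative, not from the original project):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Optional;
import java.util.function.Predicate;

public class FirstMatchSketch {
    // Returns the label of the first predicate (in insertion order) that accepts the state.
    static Optional<String> firstMatchingChoice(LinkedHashMap<Predicate<Integer>, String> choices, int state) {
        for (Map.Entry<Predicate<Integer>, String> entry : choices.entrySet()) {
            if (entry.getKey().test(state)) {
                return Optional.of(entry.getValue());
            }
        }
        return Optional.empty();
    }

    public static void main(String[] args) {
        LinkedHashMap<Predicate<Integer>, String> choices = new LinkedHashMap<>();
        choices.put(hp -> hp <= 0, "gameOver");
        choices.put(hp -> hp < 10, "lowHealthEvent");
        choices.put(hp -> true, "defaultEvent");

        System.out.println(firstMatchingChoice(choices, 5)); // Optional[lowHealthEvent]
    }
}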