Usage examples for the java.util.LinkedHashMap method put(K, V), collected from open-source projects.
V put(K key, V value);
From source file:com.streamsets.pipeline.lib.generator.delimited.TestDelimitedDataGenerator.java
@Test public void testGeneratorListMapWithHeader() throws Exception { StringWriter writer = new StringWriter(); DataGenerator gen = new DelimitedCharDataGenerator(writer, CsvMode.CSV.getFormat(), CsvHeader.WITH_HEADER, "h", "d", null); LinkedHashMap<String, Field> linkedHashMap = new LinkedHashMap<>(); linkedHashMap.put("firstField", Field.create("sampleValue")); linkedHashMap.put("secondField", Field.create(20)); Field listMapField = Field.createListMap(linkedHashMap); Record record = RecordCreator.create(); record.set(listMapField);/*from w ww .j av a 2s. c o m*/ gen.write(record); gen.close(); Assert.assertEquals("firstField,secondField\r\nsampleValue,20\r\n", writer.toString()); }
From source file:com.streamsets.pipeline.lib.generator.delimited.TestDelimitedDataGenerator.java
/**
 * Verifies that {@code CsvHeader.IGNORE_HEADER} suppresses the header row:
 * only the data values are emitted.
 */
@Test
public void testGeneratorListMapIgnoreHeader() throws Exception {
    StringWriter writer = new StringWriter();
    DataGenerator gen = new DelimitedCharDataGenerator(
        writer, CsvMode.CSV.getFormat(), CsvHeader.IGNORE_HEADER, "h", "d", null);

    // Same two-column fixture as the WITH_HEADER test.
    LinkedHashMap<String, Field> fields = new LinkedHashMap<>();
    fields.put("firstField", Field.create("sampleValue"));
    fields.put("secondField", Field.create(20));

    Record record = RecordCreator.create();
    record.set(Field.createListMap(fields));
    gen.write(record);
    gen.close();

    Assert.assertEquals("sampleValue,20\r\n", writer.toString());
}
From source file:com.streamsets.pipeline.lib.generator.delimited.TestDelimitedDataGenerator.java
@Test public void testGeneratorListMapNoHeader() throws Exception { StringWriter writer = new StringWriter(); DataGenerator gen = new DelimitedCharDataGenerator(writer, CsvMode.CSV.getFormat(), CsvHeader.NO_HEADER, "h", "d", null); LinkedHashMap<String, Field> linkedHashMap = new LinkedHashMap<>(); linkedHashMap.put("firstField", Field.create("sampleValue")); linkedHashMap.put("secondField", Field.create(20)); Field listMapField = Field.createListMap(linkedHashMap); Record record = RecordCreator.create(); record.set(listMapField);//from w w w. j av a 2 s . c om gen.write(record); gen.close(); Assert.assertEquals("sampleValue,20\r\n", writer.toString()); }
From source file:com.intel.iotkitlib.LibModules.UserManagement.java
public boolean changePassword(String emailAddress, String currentPassword, String newPassword) throws JSONException { if (emailAddress == null || currentPassword == null || newPassword == null) { Log.d(TAG, "email or currentPassword or newPassword cannot be empty"); return false; }//w w w . j a va 2 s . c o m String body; if ((body = createBodyForChangePassword(currentPassword, newPassword)) == null) { return false; } //initiating put for change password request HttpPutTask changePassword = new HttpPutTask(new HttpTaskHandler() { @Override public void taskResponse(int responseCode, String response) { Log.d(TAG, String.valueOf(responseCode)); Log.d(TAG, response); statusHandler.readResponse(responseCode, response); } }); changePassword.setHeaders(basicHeaderList); changePassword.setRequestBody(body); LinkedHashMap<String, String> linkedHashMap = new LinkedHashMap<String, String>(); linkedHashMap.put("email", emailAddress); String url = objIotKit.prepareUrl(objIotKit.changePassword, linkedHashMap); return super.invokeHttpExecuteOnURL(url, changePassword, "change password"); }
From source file:com.xyz.system.service.impl.DefinitionSourceFactoryBean.java
/**
 * Converts the security manager's URL-to-attributes map
 * ({@code LinkedHashMap<String, String>}) into the request map consumed by
 * {@code DefaultFilterInvocationDefinitionSource}
 * ({@code LinkedHashMap<RequestKey, Collection<ConfigAttribute>>}).
 * Entries with a blank attribute string are skipped; insertion order is kept.
 */
protected LinkedHashMap<RequestKey, Collection<ConfigAttribute>> buildRequestMap() throws Exception {
    LinkedHashMap<String, String> srcMap = securityManager.getRequestMap();
    LinkedHashMap<RequestKey, Collection<ConfigAttribute>> distMap =
            new LinkedHashMap<RequestKey, Collection<ConfigAttribute>>();
    for (Map.Entry<String, String> entry : srcMap.entrySet()) {
        String attributes = entry.getValue();
        // Only URLs with a non-blank, comma-delimited attribute list are mapped.
        if (StringUtils.isNotBlank(attributes)) {
            distMap.put(new RequestKey(entry.getKey(), null),
                    SecurityConfig.createListFromCommaDelimitedString(attributes));
        }
    }
    return distMap;
}
From source file:service.TagService.java
/**
 * Builds an id-keyed lookup of all active tags for the given key, preserving
 * the iteration order of {@code getAllActiveTags(pkId)}.
 *
 * @param pkId key passed through to {@code getAllActiveTags}
 * @return tag id → tag, in the order the tags were returned
 */
public LinkedHashMap<Long, Tag> getAllActiveTagsMap(Long pkId) {
    // Diamond instead of the original raw `new LinkedHashMap()`, which
    // compiled with an unchecked-conversion warning.
    LinkedHashMap<Long, Tag> res = new LinkedHashMap<>();
    for (Tag tag : getAllActiveTags(pkId)) {
        res.put(tag.getId(), tag);
    }
    return res;
}
From source file:com.opengamma.analytics.financial.provider.sensitivity.multicurve.MultipleCurrencyParameterSensitivity.java
/**
 * Create a copy of the object with all the sensitivities multiplied by a
 * common factor.
 *
 * @param factor The factor.
 * @return The multiplied sensitivity.
 */
public MultipleCurrencyParameterSensitivity multipliedBy(final double factor) {
    final MatrixAlgebra algebra = MatrixAlgebraFactory.COMMONS_ALGEBRA;
    final LinkedHashMap<Pair<String, Currency>, DoubleMatrix1D> result = new LinkedHashMap<>();
    // Iterate entries directly instead of keySet() + get(key): one map lookup
    // per entry instead of two. Fully-qualified Entry avoids a new import.
    for (final java.util.Map.Entry<Pair<String, Currency>, DoubleMatrix1D> entry : _sensitivity.entrySet()) {
        result.put(entry.getKey(), (DoubleMatrix1D) algebra.scale(entry.getValue(), factor));
    }
    return new MultipleCurrencyParameterSensitivity(result);
}
From source file:com.itemanalysis.jmetrik.file.JmetrikOutputWriter.java
private void saveCsvFile(File outputFile, Outputter outputter) throws IOException { ArrayList<VariableAttributes> variables = outputter.getColumnAttributes(); LinkedHashMap<VariableName, VariableAttributes> variableAttributeMap = new LinkedHashMap<VariableName, VariableAttributes>(); String[] header = new String[variables.size()]; int hIndex = 0; for (VariableAttributes v : variables) { variableAttributeMap.put(v.getName(), v); header[hIndex] = v.getName().toString(); hIndex++;/*from w ww . jav a 2s . c o m*/ } Writer writer = null; CSVPrinter printer = null; try { //Ensure that file is a csv file. String fname = FilenameUtils.removeExtension(outputFile.getAbsolutePath()); outputFile = new File(fname + ".csv"); writer = new OutputStreamWriter(new FileOutputStream(outputFile)); printer = new CSVPrinter(writer, CSVFormat.DEFAULT.withCommentMarker('#').withHeader(header)); Iterator<Object[][]> iter = outputter.iterator(); Object[][] outputChunk = null; while (iter.hasNext()) { outputChunk = iter.next(); for (int i = 0; i < outputChunk.length; i++) { printer.printRecord(outputChunk[i]); } } } catch (IOException ex) { throw ex; } finally { printer.close(); } }
From source file:com.mongodb.hadoop.pig.MongoStorage.java
protected void writeField(BasicDBObjectBuilder builder, ResourceSchema.ResourceFieldSchema field, Object d) throws IOException { // If the field is missing or the value is null, write a null if (d == null) { builder.add(field.getName(), d); return;//from w w w.j ava 2s . c om } ResourceSchema s = field.getSchema(); // Based on the field's type, write it out switch (field.getType()) { case DataType.INTEGER: builder.add(field.getName(), (Integer) d); return; case DataType.LONG: builder.add(field.getName(), (Long) d); return; case DataType.FLOAT: builder.add(field.getName(), (Float) d); return; case DataType.DOUBLE: builder.add(field.getName(), (Double) d); return; case DataType.BYTEARRAY: builder.add(field.getName(), d.toString()); return; case DataType.CHARARRAY: builder.add(field.getName(), (String) d); return; // Given a TUPLE, create a Map so BSONEncoder will eat it case DataType.TUPLE: if (s == null) { throw new IOException("Schemas must be fully specified to use " + "this storage function. No schema found for field " + field.getName()); } ResourceSchema.ResourceFieldSchema[] fs = s.getFields(); LinkedHashMap m = new java.util.LinkedHashMap(); for (int j = 0; j < fs.length; j++) { m.put(fs[j].getName(), ((Tuple) d).get(j)); } builder.add(field.getName(), (Map) m); return; // Given a BAG, create an Array so BSONEnconder will eat it. case DataType.BAG: if (s == null) { throw new IOException("Schemas must be fully specified to use " + "this storage function. No schema found for field " + field.getName()); } fs = s.getFields(); if (fs.length != 1 || fs[0].getType() != DataType.TUPLE) { throw new IOException("Found a bag without a tuple " + "inside!"); } // Drill down the next level to the tuple's schema. s = fs[0].getSchema(); if (s == null) { throw new IOException("Schemas must be fully specified to use " + "this storage function. 
No schema found for field " + field.getName()); } fs = s.getFields(); ArrayList a = new ArrayList<Map>(); for (Tuple t : (DataBag) d) { LinkedHashMap ma = new java.util.LinkedHashMap(); for (int j = 0; j < fs.length; j++) { ma.put(fs[j].getName(), ((Tuple) t).get(j)); } a.add(ma); } builder.add(field.getName(), a); return; case DataType.MAP: Map map = (Map) d; for (Object key : map.keySet()) { builder.add(key.toString(), map.get(key)); } return; } }
From source file:edu.jhuapl.openessence.web.util.ControllerUtils.java
/**
 * Trims a chart-data map to at most {@code limit} entries while keeping the
 * original insertion order of {@code map}.
 * <p>
 * The entries are first sorted by value and limited (excess entries are
 * combined under {@code limitLabel} by the helper). Entries that survive the
 * limit and have a positive, non-NaN count are re-emitted in the original
 * order; zero/NaN/null survivors are deferred and only appended if room
 * remains. Finally the combined "{@code limitLabel}" entry, if present, is
 * appended last.
 *
 * @param map        insertion-ordered chart data to trim
 * @param limit      maximum number of entries to keep; {@code <= 0} or
 *                   {@code >= map.size()} returns {@code map} unchanged
 * @param limitLabel key under which combined overflow values are stored
 * @return a new insertion-ordered map of at most {@code limit} entries
 *         (plus the combined entry), or {@code map} itself when no trim is needed
 */
public static LinkedHashMap<String, ChartData> getSortedAndLimitedChartDataMap(
        LinkedHashMap<String, ChartData> map, Integer limit, String limitLabel) {
    //test if we need to trim
    if (limit <= 0 || limit >= map.size()) {
        return map;
    }
    //sort by value
    Map<String, ChartData> sortedMap = ControllerUtils.getSortedByChartDataMap(map);
    //limit and combine results
    Map<String, ChartData> sortedLimitedMap = ControllerUtils.getLimitedChartDataMap(sortedMap, limit, limitLabel);
    //put the original sort order back (minus the values combined)
    LinkedHashMap<String, ChartData> originalSortResultMap = new LinkedHashMap<String, ChartData>(limit);
    LinkedHashMap<String, ChartData> passedValuesMap = new LinkedHashMap<String, ChartData>(map.size());
    int i = 0;
    // First pass, in original order: keep survivors with a real positive
    // count; park the rest in passedValuesMap for the second pass.
    for (String key : map.keySet()) {
        if (i < limit) {
            if (sortedLimitedMap.containsKey(key)) {
                ChartData value = sortedLimitedMap.get(key);
                //if value is not null/zero, add it and increment
                if (value != null && value.getCount() != null && !Double.isNaN(value.getCount())
                        && value.getCount() > 0) {
                    originalSortResultMap.put(key, value);
                    i++;
                } else {
                    //put it in a list of passed up values for inclusion at the end
                    passedValuesMap.put(key, value);
                }
            }
        }
    }
    //if we still have room after adding all sorted non zero values... fill the rest with passed values
    if (i < limit) {
        for (String key : passedValuesMap.keySet()) {
            if (i < limit) {
                originalSortResultMap.put(key, passedValuesMap.get(key));
                i++;
            }
        }
    }
    //add combined field if it is not null (indicates it was used even if the value is 0)
    ChartData cVal = sortedLimitedMap.get(limitLabel);
    if (cVal != null && cVal.getCount() != null && !Double.isNaN(cVal.getCount())) {
        originalSortResultMap.put(limitLabel, cVal);
    }
    return originalSortResultMap;
}