List of usage examples for java.util LinkedHashMap put
V put(K key, V value);
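Before the project examples below, here is a minimal standalone sketch (not taken from any of the indexed source files) showing the basic behavior of put on a LinkedHashMap: entries are inserted or replaced, and iteration follows insertion order. Class and variable names are illustrative only.

import java.util.LinkedHashMap;
import java.util.Map;

public class LinkedHashMapPutExample {
    public static void main(String[] args) {
        // put() inserts a new entry or replaces the value of an existing key;
        // iteration order of a LinkedHashMap follows insertion order
        Map<String, Integer> scores = new LinkedHashMap<>();
        scores.put("alice", 3);
        scores.put("bob", 7);
        scores.put("alice", 5); // replaces the value, keeps alice's original position

        for (Map.Entry<String, Integer> e : scores.entrySet()) {
            System.out.println(e.getKey() + " = " + e.getValue()); // prints: alice = 5, bob = 7
        }
    }
}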
From source file:com.skysql.manager.api.Schedule.java
/**
 * Gets the schedule list for a node.
 *
 * @param nodeID the node id
 * @return the schedule list for node
 */
public LinkedHashMap<String, ScheduleRecord> getScheduleForNode(String nodeID) {
    LinkedHashMap<String, ScheduleRecord> scheduleForNode = new LinkedHashMap<String, ScheduleRecord>();
    for (String key : scheduleList.keySet()) {
        ScheduleRecord record = scheduleList.get(key);
        if (record.getNodeID().equals(nodeID)) {
            scheduleForNode.put(key, record);
        }
    }
    return scheduleForNode;
}
From source file:com.qwazr.database.TableManager.java
public TableRequestResult query(String tableName, TableRequest request)
        throws ServerException, DatabaseException, IOException {
    rwl.r.lock();
    try {
        long start = request.start == null ? 0 : request.start;
        long rows = request.rows == null ? Long.MAX_VALUE : request.rows;
        Table table = getTable(tableName);
        if (request.query == null)
            throw new ServerException(Response.Status.NOT_ACCEPTABLE, "The query part is missing");
        Map<String, Map<String, CollectorInterface.LongCounter>> counters = null;
        if (request.counters != null && !request.counters.isEmpty()) {
            counters = new LinkedHashMap<String, Map<String, CollectorInterface.LongCounter>>();
            for (String col : request.counters) {
                Map<String, CollectorInterface.LongCounter> termCount = new HashMap<String, CollectorInterface.LongCounter>();
                counters.put(col, termCount);
            }
        }
        Query query = Query.prepare(request.query, null);
        RoaringBitmap docBitset = table.query(query, counters).finalBitmap;
        if (docBitset == null || docBitset.isEmpty())
            return new TableRequestResult(null);
        long count = docBitset.getCardinality();
        TableRequestResult result = new TableRequestResult(count);
        table.getRows(docBitset, request.columns, start, rows, result.rows);
        if (counters != null) {
            for (Map.Entry<String, Map<String, CollectorInterface.LongCounter>> countersEntry : counters.entrySet()) {
                LinkedHashMap<String, Long> counter = new LinkedHashMap<String, Long>();
                for (Map.Entry<String, CollectorInterface.LongCounter> counterEntry : countersEntry.getValue().entrySet())
                    counter.put(counterEntry.getKey(), counterEntry.getValue().count);
                result.counters.put(countersEntry.getKey(), counter);
            }
        }
        return result;
    } finally {
        rwl.r.unlock();
    }
}
From source file:ca.sfu.federation.model.InputTable.java
/**
 * Create an index by name for the current inputs.
 * @return Map.
 */
private LinkedHashMap getInputIndex() {
    // init
    LinkedHashMap index = new LinkedHashMap();
    // add all inputs to the index
    Iterator e = this.inputs.iterator();
    while (e.hasNext()) {
        Input input = (Input) e.next();
        index.put(input.getName(), input);
    }
    // return result
    return index;
}
From source file:com.rockagen.gnext.service.spring.security.extension.JdbcFilterInvocationSecurityMetadataSourceFactoryBean.java
/**
 * Builds the request map.
 * <p>return LinkedHashMap< {@link RequestMatcher}, Collection< {@link ConfigAttribute}>> </p>
 *
 * @return requestMap order-preserving map of request definitions to attribute lists
 */
protected LinkedHashMap<RequestMatcher, Collection<ConfigAttribute>> buildRequestMap() {
    LinkedHashMap<RequestMatcher, Collection<ConfigAttribute>> requestMap = new LinkedHashMap<RequestMatcher, Collection<ConfigAttribute>>();
    Map<String, String> resourceMap = findResources();
    for (Map.Entry<String, String> entry : resourceMap.entrySet()) {
        RequestMatcher key = new AntPathRequestMatcher(entry.getKey());
        requestMap.put(key, SecurityConfig.createListFromCommaDelimitedString(entry.getValue()));
    }
    return requestMap;
}
From source file:com.opengamma.analytics.financial.interestrate.capletstripping.CapletStrippingJacobian.java
public CapletStrippingJacobian(final List<CapFloor> caps, final YieldCurveBundle yieldCurves,
        final LinkedHashMap<String, double[]> knotPoints,
        final LinkedHashMap<String, Interpolator1D> interpolators,
        final LinkedHashMap<String, ParameterLimitsTransform> parameterTransforms,
        final LinkedHashMap<String, InterpolatedDoublesCurve> knownParameterTermSturctures) {
    Validate.notNull(caps, "caps null");
    Validate.notNull(knotPoints, "null node points");
    Validate.notNull(interpolators, "null interpolators");
    Validate.isTrue(knotPoints.size() == interpolators.size(), "size mismatch between nodes and interpolators");
    _knownParameterTermSturctures = knownParameterTermSturctures;

    final LinkedHashMap<String, Interpolator1D> transInterpolators = new LinkedHashMap<>();
    final Set<String> names = interpolators.keySet();
    _parameterNames = names;
    for (final String name : names) {
        final Interpolator1D temp = new TransformedInterpolator1D(interpolators.get(name), parameterTransforms.get(name));
        transInterpolators.put(name, temp);
    }

    _capPricers = new ArrayList<>(caps.size());
    for (final CapFloor cap : caps) {
        _capPricers.add(new CapFloorPricer(cap, yieldCurves));
    }
    _interpolators = transInterpolators;
    _curveBuilder = new InterpolatedCurveBuildingFunction(knotPoints, transInterpolators);
    _dataBundleBuilder = new Interpolator1DDataBundleBuilderFunction(knotPoints, transInterpolators);
}
From source file:com.streamsets.pipeline.lib.parser.delimited.DelimitedCharDataParser.java
protected Record createRecord(long offset, String[] columns) throws DataParserException {
    Record record = context.createRecord(readerId + "::" + offset);
    if (recordType == CsvRecordType.LIST) {
        List<Field> row = new ArrayList<>();
        for (int i = 0; i < columns.length; i++) {
            Map<String, Field> cell = new HashMap<>();
            Field header = (headers != null) ? headers.get(i) : null;
            if (header != null) {
                cell.put("header", header);
            }
            Field value = Field.create(columns[i]);
            cell.put("value", value);
            row.add(Field.create(cell));
        }
        record.set(Field.create(row));
    } else {
        LinkedHashMap<String, Field> listMap = new LinkedHashMap<>();
        for (int i = 0; i < columns.length; i++) {
            String key;
            Field header = (headers != null) ? headers.get(i) : null;
            if (header != null) {
                key = header.getValueAsString();
            } else {
                key = i + "";
            }
            listMap.put(key, Field.create(columns[i]));
        }
        record.set(Field.createListMap(listMap));
    }
    return record;
}
From source file:com.logsniffer.reader.support.FormattedTextReader.java
@Override
public LinkedHashMap<String, FieldBaseTypes> getFieldTypes() throws FormatException {
    init();
    final LinkedHashMap<String, FieldBaseTypes> fields = super.getFieldTypes();
    if (parsingSpecifiers != null) {
        for (final Specifier s : parsingSpecifiers) {
            fields.put(s.getFieldName(), s.getFieldType());
        }
    }
    return fields;
}
From source file:de.roderick.weberknecht.WebSocketHandshake.java
public byte[] getHandshake() {
    String path = url.getPath();
    String host = url.getHost();
    if (url.getPort() != -1) {
        host += ":" + url.getPort();
    }

    LinkedHashMap<String, String> header = new LinkedHashMap<String, String>();

    if (this.extraHeaders != null) {
        for (String fieldName : this.extraHeaders.keySet()) {
            // Only checks for field names with the exact same text,
            // but according to RFC 2616 (HTTP) field names are case-insensitive.
            if (!header.containsKey(fieldName)) {
                header.put(fieldName, this.extraHeaders.get(fieldName));
            }
        }
    }

    header.put("Host", host);
    header.put("Upgrade", "websocket");
    header.put("Connection", "Upgrade");
    header.put("Sec-WebSocket-Version", String.valueOf(WebSocket.getVersion()));
    header.put("Sec-WebSocket-Key", this.nonce);

    if (this.protocol != null) {
        header.put("Sec-WebSocket-Protocol", this.protocol);
    }

    String handshake = "GET " + path + " HTTP/1.1\r\n";
    handshake += this.generateHeader(header);
    handshake += "\r\n";
    byte[] handshakeBytes = new byte[handshake.getBytes().length];
    System.arraycopy(handshake.getBytes(), 0, handshakeBytes, 0, handshake.getBytes().length);
    return handshakeBytes;
}
From source file:edu.cornell.kfs.coa.businessobject.AccountReversionGlobal.java
/**
 * @see org.kuali.kfs.kns.bo.BusinessObjectBase#toStringMapper()
 */
protected LinkedHashMap toStringMapperr_RICE20_REFACTORME() {
    LinkedHashMap stringMapper = new LinkedHashMap();
    stringMapper.put(KFSPropertyConstants.DOCUMENT_NUMBER, this.documentNumber);
    stringMapper.put(KFSPropertyConstants.UNIVERSITY_FISCAL_YEAR, this.universityFiscalYear);
    return stringMapper;
}
From source file:de.helmholtz_muenchen.ibis.ngs.featureCounts.FeatureCountsNodeModel.java
@Override
protected LinkedHashMap<String, String> getGUIParameters(final BufferedDataTable[] inData) {
    LinkedHashMap<String, String> pars = new LinkedHashMap<String, String>();

    /********************* SIMPLE PARAMETER ***************************/
    if (SET_COUNT_MULTIMAPPED.getBooleanValue())
        pars.put(NAME_OF_COUNT_MULTIMAPPED, "");
    if (SET_COUNT_OVERLAPPING_MULTI.getBooleanValue())
        pars.put(NAME_OF_ASSIGN_MULTI, "");
    if (SET_COUNT_FRAGMENTS.getBooleanValue())
        pars.put(NAME_OF_COUNT_FRAGMENTS, "");
    if (!SET_CHIMERIC_FRAGMENTS.getBooleanValue())
        pars.put(NAME_OF_COUNT_CHIMERIC, "");
    if (SET_FEATURE_LEVEL.getBooleanValue())
        pars.put(NAME_OF_COUNT_ON_FEATURE_LVL, "");
    if (SET_THREAD_NUMBER.getIntValue() > 1)
        pars.put(NAME_OF_THREAD_NUMBER, Integer.toString(SET_THREAD_NUMBER.getIntValue()));

    pars.put(NAME_OF_ANNOTATION_TYPE, "GTF");
    if (annotation_file.endsWith(".saf")) {
        pars.put(NAME_OF_ANNOTATION_TYPE, "SAF");
    }
    pars.put(NAME_OF_FEATURE_TYPE, SET_FEATURE_TYPE.getStringValue());
    pars.put(NAME_OF_ANNOTATION_FILE, annotation_file);
    pars.put(NAME_OF_GROUP_FEATURE, SET_GROUP_FEATURE.getStringValue());

    /********************* OUTPUT ****************************/
    // String outputFolderArgument = getAbsoluteFilename(SET_OUTPUT_FOLDER.getStringValue(), false);
    // File outDir = new File(outputFolderArgument).getParentFile();
    // // create folder, if not already there
    // if(!outDir.isDirectory())
    //     outDir.mkdir();
    // pars.put(NAME_OF_OUTPUT_FILE, outputFolderArgument);

    /********************** INPUT BAM/SAM ****************************/
    ArrayList<String> inputArgument = new ArrayList<String>();
    boolean first = true;
    String infile;

    // get input parameter from BAM/SAM selector
    for (Iterator<DataRow> it = inData[0].iterator(); it.hasNext();) {
        infile = it.next().getCell(bam_sam_index).toString();
        inputArgument.add(infile);

        if (first) {
            outfile = SET_OUTPUT_FOLDER.getStringValue();
            if (CompatibilityChecker.inputFileNotOk(SET_OUTPUT_FOLDER.getStringValue(), false)) {
                outfile = new File(infile).getParent();
            }
            if (!outfile.endsWith(File.separator)) {
                outfile += File.separator;
            }
            outfile += new File(infile).getName();
            outfile = IO.replaceFileExtension(outfile, ".featureCounts");
            first = false;
        }
    }

    // add the input parameter
    pars.put(" ", StringUtils.join(inputArgument, " "));
    // add the outfile
    pars.put(NAME_OF_OUTPUT_FILE, outfile);

    // return the GUI parameter
    return pars;
}