List of usage examples for java.util.LinkedHashMap.keySet()
public Set<K> keySet()
From source file:gtu._work.ui.SqlCreaterUI.java
private void firstRowMakeInsertSqlBtn(ActionEvent evt) { try {// w w w .j a va 2 s. co m String tableName = Validate.notBlank(tableNameText.getText(), "??"); File srcFile = JCommonUtil.filePathCheck(excelFilePathText2.getText(), "?", "xlsx"); File saveFile = JCommonUtil._jFileChooser_selectFileOnly_saveFile(); if (saveFile == null) { JCommonUtil._jOptionPane_showMessageDialog_error("?"); return; } BufferedWriter writer = new BufferedWriter( new OutputStreamWriter(new FileOutputStream(saveFile), "utf8")); BufferedInputStream bis = new BufferedInputStream(new FileInputStream(srcFile)); XSSFWorkbook xssfWorkbook = new XSSFWorkbook(bis); Sheet sheet = xssfWorkbook.getSheetAt(0); LinkedHashMap<String, String> valueMap = new LinkedHashMap<String, String>(); for (int ii = 0; ii < sheet.getRow(0).getLastCellNum(); ii++) { valueMap.put(formatCellType(sheet.getRow(0).getCell(ii)), ""); } for (int j = 0; j < sheet.getPhysicalNumberOfRows(); j++) { Row row = sheet.getRow(j); LinkedHashMap<String, String> valueMap2 = (LinkedHashMap<String, String>) valueMap.clone(); int ii = 0; for (String key : valueMap2.keySet()) { valueMap2.put(key, formatCellType(row.getCell(ii))); ii++; } appendLog("" + valueMap2); String insertSql = this.fetchInsertSQL(tableName, valueMap2); appendLog("" + insertSql); writer.write(insertSql); writer.newLine(); } bis.close(); writer.flush(); writer.close(); JCommonUtil._jOptionPane_showMessageDialog_info("? : \n" + saveFile); } catch (Exception ex) { JCommonUtil.handleException(ex); } }
From source file:org.kutkaitis.timetable2.timetable.OptimizationResultsBean.java
/**
 * Sums penalty points for "lecture windows" (a free slot followed later in the same
 * day by another lecture) for teachers of forms III and IV.
 *
 * For every day and every teacher, each lecture slot holding EMPTY_GROUP is checked:
 * if any LATER slot that day is non-empty, the gap is a real window and a penalty
 * is added.
 *
 * NOTE(review): timetable entries are assumed to be "something : groupName" strings
 * (split on ':' and index [1] taken) keyed by 1-based lecture numbers — confirm
 * against getAllDaysTeacherTimeTable().
 *
 * @return total penalty points accumulated over all days and teachers
 */
private int calculateTeachersLectureWindowForIIIAndIV() {
    int penaltyPoints = 0;
    // one LinkedHashMap per day: teacherName -> (lectureNumber -> "x : group") timetable
    List<LinkedHashMap> teachersAllDay = getAllDaysTeacherTimeTable();
    for (LinkedHashMap<String, LinkedHashMap> daysTimeTable : teachersAllDay) {
        Collection<String> teacherNames = daysTimeTable.keySet();
        for (String teacherName : teacherNames) {
            LinkedHashMap<String, String> teachersTimeTableForTheDay = daysTimeTable.get(teacherName);
            Collection<String> lectureNumbers = teachersTimeTableForTheDay.keySet();
            int lectureNumbersSize = lectureNumbers.size();
            for (String lectureNumber : lectureNumbers) {
                String groupNameToSplit = teachersTimeTableForTheDay.get(lectureNumber);
                String[] splittedGroupNames = groupNameToSplit.split(":");
                // group name sits after the colon
                String groupName = splittedGroupNames[1].trim();
                if (StringUtils.equals(groupName, EMPTY_GROUP)) {
                    // only teachers of forms III and IV are penalized here
                    if (studentsMockData.getTeachersFromIIIAndIV().containsKey(teacherName)) {
                        boolean isNotLastLectures = true;
                        // scan forward from this empty slot: if every remaining slot is
                        // also empty, this is trailing free time, not a window
                        for (int lectNum = Integer
                                .valueOf(lectureNumber); lectNum <= lectureNumbersSize; lectNum++) {
                            String grpNam = teachersTimeTableForTheDay.get(String.valueOf(lectNum))
                                    .split(":")[1].trim();
                            if (StringUtils.equals(grpNam, EMPTY_GROUP)) {
                                isNotLastLectures = false;
                            } else {
                                // found a later lecture -> the empty slot is a real window
                                isNotLastLectures = true;
                                break;
                            }
                        }
                        if (isNotLastLectures) {
                            penaltyPoints += PenaltyPoints.LECTURE_WINDOW_FOR_TEACHER_III_IV.penaltyPoints();
                        }
                    }
                }
            }
        }
    }
    return penaltyPoints;
}
From source file:com.grarak.kerneladiutor.activities.tools.profile.ProfileActivity.java
private void returnIntent(LinkedHashMap<String, String> commandsList) { ArrayList<String> ids = new ArrayList<>(); ArrayList<String> commands = new ArrayList<>(); Collections.addAll(ids, commandsList.keySet().toArray(new String[commands.size()])); Collections.addAll(commands, commandsList.values().toArray(new String[commands.size()])); if (commands.size() > 0) { Intent intent = new Intent(); intent.putExtra(POSITION_INTENT, mProfilePosition); intent.putExtra(RESULT_ID_INTENT, ids); intent.putExtra(RESULT_COMMAND_INTENT, commands); setResult(0, intent);//from w ww.jav a 2s . com finish(); } else { Utils.toast(R.string.no_changes, ProfileActivity.this); } }
From source file:hydrograph.ui.propertywindow.propertydialog.PropertyDialogBuilder.java
private void addGroupsInTab(ScrolledCompositeHolder scrolledCompositeHolder, LinkedHashMap<String, ArrayList<Property>> subgroupTree) { for (String subgroupName : subgroupTree.keySet()) { Property property = subgroupTree.get(subgroupName).get(0); AbstractELTContainerWidget subGroupContainer = getGroupWidgetContainer(scrolledCompositeHolder, subgroupName, property); addCustomWidgetsToGroupWidget(subgroupTree, subgroupName, subGroupContainer); }// w ww . j a v a2 s.com }
From source file:eu.hydrologis.jgrass.charting.impl.JGrassXYLineChart.java
/** * A line chart creator basing on series made up two values per row. More series, independing * one from the other are supported./* ww w. j ava 2s .c om*/ * * @param chartValues - a hashmap containing as keys the name of the series and as values the * double[][] representing the data. Important: the data matrix has to be passed as two * rows (not two columns) */ public JGrassXYLineChart(LinkedHashMap<String, double[][]> chartValues) { chartSeries = new XYSeries[chartValues.size()]; // extrapolate the data from the Hashmap and convert it to a XYSeries // Collection final Iterator<String> it = chartValues.keySet().iterator(); int count = 0; while (it.hasNext()) { final String key = it.next(); final double[][] values = chartValues.get(key); chartSeries[count] = new XYSeries(key); for (int i = 0; i < values[0].length; i++) { // important: the data matrix has to be passed as two rows (not // two columns) double val = values[1][i]; if (isNovalue(val)) continue; chartSeries[count].add(values[0][i], val); } count++; } lineDataset = new XYSeriesCollection(); for (int i = 0; i < chartSeries.length; i++) { lineDataset.addSeries(chartSeries[i]); } }
From source file:edu.jhuapl.openessence.web.util.ControllerUtils.java
/** * Returns a new map that is sorted and then limited to the top {@code limit} values. It then places the map back * in the original sort order minus anything that has been cut. *///from www . j ava2 s . co m public static LinkedHashMap<String, Double> getSortedAndLimitedMap(LinkedHashMap<String, Double> map, Integer limit, String limitLabel) { //test if we need to trim if (limit <= 0 || limit >= map.size()) { return map; } //sort by value Map<String, Double> sortedMap = ControllerUtils.getSortedByValueMap(map); //limit and combine results Map<String, Double> sortedLimitedMap = ControllerUtils.getLimitedMap(sortedMap, limit, limitLabel); //put the original sort order back (minus the values combined) LinkedHashMap<String, Double> originalSortResultMap = new LinkedHashMap<String, Double>(limit); LinkedHashMap<String, Double> passedValuesMap = new LinkedHashMap<String, Double>(map.size()); int i = 0; for (String key : map.keySet()) { if (i < limit) { if (sortedLimitedMap.containsKey(key)) { Double value = sortedLimitedMap.get(key); //if value is not null/zero, add it and increment if (value != null && !Double.isNaN(value) && value > 0) { originalSortResultMap.put(key, value); i++; } else { //put it in a list of passed up values for inclusion at the end passedValuesMap.put(key, value); } } } } //if we still have room after adding all sorted non zero values... fill the rest with passed values if (i < limit) { for (String key : passedValuesMap.keySet()) { if (i < limit) { originalSortResultMap.put(key, passedValuesMap.get(key)); i++; } } } //add combined field if it is not null (indicates it was used even if the value is 0) Double cVal = sortedLimitedMap.get(limitLabel); if (cVal != null && !Double.isNaN(cVal)) { originalSortResultMap.put(limitLabel, cVal); } return originalSortResultMap; }
From source file:com.genentech.application.property.TPSA.java
/**
 * Runs the TPSA self-test: parses each reference SMILES, computes TPSA, attaches
 * the value and the canonical SMILES as SD data, and writes the molecule to
 * TPSA_test.sdf while printing literature vs computed values.
 *
 * @param countP whether phosphorus contributes to the TPSA sum
 * @param countS whether sulfur contributes to the TPSA sum
 */
private void runTest(boolean countP, boolean countS) {
    String canSmi;
    OEGraphMol mol = new OEGraphMol();
    // SMILES -> literature TPSA value, in insertion order
    LinkedHashMap<String, String> smilesMap = new LinkedHashMap<String, String>();
    loadHashMap(smilesMap);
    oemolostream ofs = new oemolostream();
    ofs.open("TPSA_test.sdf");
    // enhanced for loop replaces explicit Iterator plus redundant String casts
    for (String key : smilesMap.keySet()) {
        String PSA = smilesMap.get(key);
        mol.Clear();
        if (oechem.OEParseSmiles(mol, key)) {
            mol.SetTitle(key);
            System.out.print(key + ": ");
            double tpsa = calculateTPSA(mol, countP, countS);
            oechem.OEAddSDData(mol, "TPSA", String.valueOf(tpsa));
            canSmi = oechem.OECreateCanSmiString(mol);
            oechem.OEAddSDData(mol, "CanSmiles", canSmi);
            System.out.printf("Lit: %s\tTPSA: %.2f\n", PSA, tpsa);
            oechem.OEWriteMolecule(ofs, mol);
        } else {
            System.err.println("Error parsing the SMILES string: " + key);
        }
    }
    ofs.close(); // the original never closed the output stream
}
From source file:eu.hydrologis.jgrass.charting.impl.JGrassXYBarChart.java
/** * A line chart creator basing on series made up two values per row. More series, independing * one from the other are supported.//from www.jav a 2 s .com * * @param chartValues - a hashmap containing as keys the name of the series and as values the * double[][] representing the data. Important: the data matrix has to be passed as two * rows (not two columns) * @param barwidth */ public JGrassXYBarChart(LinkedHashMap<String, double[][]> chartValues, double barwidth) { chartSeries = new XYSeries[chartValues.size()]; // extrapolate the data from the Hashmap and convert it to a XYSeries // Collection Iterator<String> it = chartValues.keySet().iterator(); int count = 0; while (it.hasNext()) { String key = it.next(); double[][] values = chartValues.get(key); chartSeries[count] = new XYSeries(key); for (int i = 0; i < values[0].length; i++) { // important: the data matrix has to be passed as two rows (not // two columns) double val = values[1][i]; if (isNovalue(val)) continue; chartSeries[count].add(values[0][i], val); } count++; } barDataset = new XYSeriesCollection(); for (int i = 0; i < chartSeries.length; i++) { barDataset.addSeries(chartSeries[i]); } dataset = new XYBarDataset(barDataset, barwidth); }
From source file:com.allinfinance.dwr.system.SelectOptionsDWR.java
/** * ????/*from ww w. ja v a2s . co m*/ * @param txnId * @return */ public String getComboDataWithParameter(String txnId, String parameter, HttpServletRequest request, HttpServletResponse response) { String jsonData = "{data:[{'valueField':'','displayField':'?'}]}"; try { //?? Operator operator = (Operator) request.getSession().getAttribute(Constants.OPERATOR_INFO); LinkedHashMap<String, String> dataMap = SelectOption.getSelectView(txnId, new Object[] { operator, parameter }); Iterator<String> iter = dataMap.keySet().iterator(); if (iter.hasNext()) { Map<String, Object> jsonDataMap = new HashMap<String, Object>(); LinkedList<Object> jsonDataList = new LinkedList<Object>(); Map<String, String> tmpMap = null; String key = null; while (iter.hasNext()) { tmpMap = new LinkedHashMap<String, String>(); key = iter.next(); tmpMap.put("valueField", key); tmpMap.put("displayField", dataMap.get(key)); jsonDataList.add(tmpMap); } jsonDataMap.put("data", jsonDataList); jsonData = JSONBean.genMapToJSON(jsonDataMap); } } catch (Exception e) { e.printStackTrace(); log.error(e.getMessage()); } // System.out.println(jsonData); return jsonData; }
From source file:com.streamsets.pipeline.stage.processor.jdbcmetadata.JdbcMetadataProcessor.java
/**
 * Processes one record: derives the target schema/table via EL, converts the
 * record's fields into a JDBC column structure, and reconciles it with the
 * cached table structure — creating the table when it does not exist or issuing
 * an ALTER TABLE when new columns (drift) are detected — before passing the
 * record downstream. Checked JDBC errors are routed to the error record handler.
 */
@Override
protected void process(Record record, BatchMaker batchMaker) throws StageException {
    try {
        // set up EL context so schema/table expressions can reference the record and time
        ELVars variables = getContext().createELVars();
        RecordEL.setRecordInContext(variables, record);
        TimeEL.setCalendarInContext(variables, Calendar.getInstance());
        TimeNowEL.setTimeNowInContext(variables, new Date());
        String schema = (schemaEL != null) ? elEvals.dbNameELEval.eval(variables, schemaEL, String.class)
                : null;
        String tableName = elEvals.tableNameELEval.eval(variables, tableNameEL, String.class);
        // normalize empty schema to null (no-schema case)
        if (StringUtils.isEmpty(schema)) {
            schema = null;
        }
        // Obtain the record structure from current record
        LinkedHashMap<String, JdbcTypeInfo> recordStructure = JdbcMetastoreUtil.convertRecordToJdbcType(
                record, decimalDefaultsConfig.precisionAttribute, decimalDefaultsConfig.scaleAttribute,
                schemaWriter);
        // records with no mappable fields pass through untouched
        if (recordStructure.isEmpty()) {
            batchMaker.addRecord(record);
            return;
        }
        LinkedHashMap<String, JdbcTypeInfo> tableStructure = null;
        try {
            // cached lookup of the existing table's structure, keyed by (schema, table)
            tableStructure = tableCache.get(Pair.of(schema, tableName));
        } catch (ExecutionException e) {
            throw new JdbcStageCheckedException(JdbcErrors.JDBC_203, e.getMessage(), e);
        }
        if (tableStructure.isEmpty()) {
            // Create table
            schemaWriter.createTable(schema, tableName, recordStructure);
            tableCache.put(Pair.of(schema, tableName), recordStructure);
        } else {
            // Compare tables: columns present in the record but missing from the table
            LinkedHashMap<String, JdbcTypeInfo> columnDiff = JdbcMetastoreUtil.getDiff(tableStructure,
                    recordStructure);
            if (!columnDiff.isEmpty()) {
                LOG.trace("Detected drift for table {} - new columns: {}", tableName,
                        StringUtils.join(columnDiff.keySet(), ","));
                schemaWriter.alterTable(schema, tableName, columnDiff);
                tableCache.put(Pair.of(schema, tableName), recordStructure);
            }
        }
        batchMaker.addRecord(record);
    } catch (JdbcStageCheckedException error) {
        // route the failing record to the stage's error handling instead of failing the batch
        LOG.error("Error happened when processing record", error);
        LOG.trace("Record that caused the error: {}", record.toString());
        errorRecordHandler.onError(new OnRecordErrorException(record, error.getErrorCode(),
                error.getParams()));
    }
}