Usage examples for java.util.LinkedHashMap.entrySet()
public Set<Map.Entry<K, V>> entrySet()
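entrySet() returns a live Set view of the map's mappings; for LinkedHashMap the view iterates in insertion order (or access order, if the map was constructed that way), and changes made through Map.Entry.setValue() write through to the map. Before the project excerpts below, here is a minimal, self-contained sketch of that basic pattern; the class, keys, and values are illustrative only and are not taken from any of the listed projects.

import java.util.LinkedHashMap;
import java.util.Map;

public class EntrySetExample {
    public static void main(String[] args) {
        // LinkedHashMap preserves insertion order, so entrySet() iterates a, b, c
        LinkedHashMap<String, Integer> map = new LinkedHashMap<String, Integer>();
        map.put("a", 1);
        map.put("b", 2);
        map.put("c", 3);

        for (Map.Entry<String, Integer> entry : map.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }

        // The entry set is a live view: setValue() writes through to the map
        for (Map.Entry<String, Integer> entry : map.entrySet()) {
            entry.setValue(entry.getValue() * 10);
        }
        System.out.println(map); // prints {a=10, b=20, c=30}
    }
}

The project excerpts that follow apply this same entrySet() iteration in larger contexts.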
From source file:com.streamsets.pipeline.lib.jdbc.multithread.TableContextUtil.java
private void checkForUnsupportedOffsetColumns(LinkedHashMap<String, Integer> offsetColumnToType)
        throws StageException {
    // Validate if there are partition column types for offset maintenance
    List<String> unsupportedOffsetColumnAndType = new ArrayList<>();
    for (Map.Entry<String, Integer> offsetColumnToTypeEntry : offsetColumnToType.entrySet()) {
        if (OffsetQueryUtil.UNSUPPORTED_OFFSET_SQL_TYPES.contains(offsetColumnToTypeEntry.getValue())) {
            unsupportedOffsetColumnAndType
                    .add(offsetColumnToTypeEntry.getKey() + " - " + offsetColumnToTypeEntry.getValue());
        }
    }
    if (!unsupportedOffsetColumnAndType.isEmpty()) {
        throw new StageException(JdbcErrors.JDBC_69, COMMA_JOINER.join(unsupportedOffsetColumnAndType));
    }
}
From source file:pt.lsts.neptus.plugins.sunfish.awareness.HubLocationProvider.java
@Periodic(millisBetweenUpdates = 3000 * 60)
public void sendToHub() {
    if (!enabled)
        return;
    NeptusLog.pub().info("Uploading device updates to Hub...");
    LinkedHashMap<Integer, AssetPosition> toSend = new LinkedHashMap<Integer, AssetPosition>();
    LocationType myLoc = MyState.getLocation();
    AssetPosition myPos = new AssetPosition(StringUtils.toImcName(GeneralPreferences.imcCcuName),
            myLoc.getLatitudeDegs(), myLoc.getLongitudeDegs());
    toSend.put(ImcMsgManager.getManager().getLocalId().intValue(), myPos);
    toSend.putAll(positionsToSend);
    positionsToSend.clear();
    DeviceUpdate upd = new DeviceUpdate();
    //ExtendedDeviceUpdate upd = new ExtendedDeviceUpdate();
    upd.source = ImcMsgManager.getManager().getLocalId().intValue();
    upd.destination = 65535;
    for (Entry<Integer, AssetPosition> pos : toSend.entrySet()) {
        Position p = new Position();
        p.id = pos.getKey();
        p.latRads = pos.getValue().getLoc().getLatitudeRads();
        p.lonRads = pos.getValue().getLoc().getLongitudeRads();
        p.posType = Position.fromImcId(p.id);
        p.timestamp = pos.getValue().getTimestamp() / 1000.0;
        upd.getPositions().put(pos.getKey(), p);
    }
    for (Position p : upd.getPositions().values()) {
        NeptusLog.pub().info("Uploading position for " + p.id + ": " + Math.toDegrees(p.latRads) + "/"
                + Math.toDegrees(p.lonRads) + "/" + new Date((long) (1000 * p.timestamp)));
    }
    try {
        HttpPost postMethod = new HttpPost(iridiumUrl);
        postMethod.setHeader("Content-type", "application/hub");
        String data = new String(Hex.encodeHex(upd.serialize()));
        NeptusLog.pub().info("Sending '" + data + "'");
        StringEntity ent = new StringEntity(data);
        postMethod.setEntity(ent);
        @SuppressWarnings("resource")
        HttpClient client = new DefaultHttpClient();
        HttpResponse response = client.execute(postMethod);
        NeptusLog.pub().info("Sent " + upd.getPositions().size() + " device updates to Hub: "
                + response.getStatusLine().toString());
        postMethod.abort();
    } catch (Exception e) {
        NeptusLog.pub().error("Error sending updates to hub", e);
        parent.postNotification(Notification
                .error("Situation Awareness",
                        e.getClass().getSimpleName() + " while trying to send device updates to HUB.")
                .requireHumanAction(false));
    }
}
From source file:gov.llnl.lc.smt.command.link.SmtLink.java
private int getNumAtLevel(LinkedHashMap<String, IB_Edge> eMap, int level) {
    // given a map of edges, return only those at the desired level (sort by guid then port number)
    int num = 0;

    // iterate through the map, and add only those with the desired depth or level
    for (Entry<String, IB_Edge> entry : eMap.entrySet()) {
        IB_Edge e = entry.getValue();
        if (e.getDepth() == level)
            num++;
    }
    return num;
}
From source file:com.taobao.datax.plugins.writer.oraclejdbcwriter.OracleJdbcWriter.java
public String buildInsertString() {
    StringBuilder sb = new StringBuilder();
    sb.append("INSERT INTO ").append(this.schema + "." + this.table).append(" ");
    if (!StringUtils.isEmpty(this.colorder)) {
        sb.append("(").append(this.colorder).append(")");
    }
    sb.append(" VALUES(");
    try {
        ResultSet rs = this.connection.createStatement()
                .executeQuery("SELECT COLUMN_NAME,DATA_TYPE FROM USER_TAB_COLUMNS WHERE TABLE_NAME='"
                        + this.table.toUpperCase() + "'");
        LinkedHashMap<String, String> map = new LinkedHashMap<String, String>();
        while (rs.next()) {
            String colName = rs.getString(1);
            String colType = rs.getString(2);
            map.put(colName, colType);
        }
        logger.debug("Column map:size=" + map.size() + ";cols=" + map.toString());
        if (StringUtils.isEmpty(this.colorder)) {
            Iterator<Entry<String, String>> it = map.entrySet().iterator();
            while (it.hasNext()) {
                Entry<String, String> entry = it.next();
                String colType = entry.getValue();
                if (colType.toUpperCase().equals("DATE")) {
                    sb.append("to_date(?,'" + this.dtfmt + "'),");
                } else {
                    sb.append("?,");
                }
            }
            sb.deleteCharAt(sb.length() - 1); // remove last comma
            sb.append(")");
        } else {
            String[] arr = colorder.split(",");
            for (String colName : arr) {
                if (!map.containsKey(colName)) {
                    throw new DataExchangeException("col " + colName + " not in database");
                }
                String colType = map.get(colName);
                if (colType.toUpperCase().equals("DATE")) {
                    sb.append("to_date(?,'" + this.dtfmt + "'),");
                } else {
                    sb.append("?,");
                }
            }
            sb.deleteCharAt(sb.length() - 1); // remove last comma
            sb.append(")");
        }
    } catch (SQLException e) {
        e.printStackTrace();
        throw new DataExchangeException(e.getMessage());
    }
    return sb.toString();
}
From source file:com.google.gwt.emultest.java.util.LinkedHashMapTest.java
public void testEntrySet() {
    LinkedHashMap<String, String> hashMap = new LinkedHashMap<String, String>();
    checkEmptyLinkedHashMapAssumptions(hashMap);

    Set<Entry<String, String>> entrySet = hashMap.entrySet();
    assertNotNull(entrySet);

    // Check that the entry set looks right
    hashMap.put(KEY_TEST_ENTRY_SET, VALUE_TEST_ENTRY_SET_1);
    entrySet = hashMap.entrySet();
    assertEquals(entrySet.size(), SIZE_ONE);
    Iterator<Entry<String, String>> itSet = entrySet.iterator();
    Map.Entry<String, String> entry = itSet.next();
    assertEquals(entry.getKey(), KEY_TEST_ENTRY_SET);
    assertEquals(entry.getValue(), VALUE_TEST_ENTRY_SET_1);

    // Check that entries in the entrySet are updated correctly on overwrites
    hashMap.put(KEY_TEST_ENTRY_SET, VALUE_TEST_ENTRY_SET_2);
    entrySet = hashMap.entrySet();
    assertEquals(entrySet.size(), SIZE_ONE);
    itSet = entrySet.iterator();
    entry = itSet.next();
    assertEquals(entry.getKey(), KEY_TEST_ENTRY_SET);
    assertEquals(entry.getValue(), VALUE_TEST_ENTRY_SET_2);

    // Check that entries are updated on removes
    hashMap.remove(KEY_TEST_ENTRY_SET);
    checkEmptyLinkedHashMapAssumptions(hashMap);
}
From source file:nl.nn.adapterframework.webcontrol.api.ShowScheduler.java
@PUT
@Path("/schedules/")
@Produces(MediaType.APPLICATION_JSON)
public Response putSchedules(LinkedHashMap<String, Object> json) throws ApiException {
    initBase(servletConfig);

    DefaultIbisManager manager = (DefaultIbisManager) ibisManager;
    SchedulerHelper sh = manager.getSchedulerHelper();

    Scheduler scheduler;
    try {
        scheduler = sh.getScheduler();
    } catch (SchedulerException e) {
        throw new ApiException("Cannot find scheduler");
    }

    String action = null;
    for (Entry<String, Object> entry : json.entrySet()) {
        String key = entry.getKey();
        if (key.equalsIgnoreCase("action")) {
            action = entry.getValue().toString();
        }
    }

    try {
        String commandIssuedBy = servletConfig.getInitParameter("remoteHost");
        commandIssuedBy += servletConfig.getInitParameter("remoteAddress");
        commandIssuedBy += servletConfig.getInitParameter("remoteUser");

        if (action.equalsIgnoreCase("start")) {
            if (scheduler.isInStandbyMode() || scheduler.isShutdown()) {
                scheduler.start();
                log.info("start scheduler:" + new Date() + commandIssuedBy);
            } else {
                throw new ApiException("Failed to start scheduler");
            }
        } else if (action.equalsIgnoreCase("pause")) {
            if (scheduler.isStarted()) {
                scheduler.standby();
                log.info("pause scheduler:" + new Date() + commandIssuedBy);
            } else {
                throw new ApiException("Failed to pause scheduler");
            }
        } else if (action.equalsIgnoreCase("stop")) {
            if (scheduler.isStarted() || scheduler.isInStandbyMode()) {
                scheduler.shutdown();
                log.info("shutdown scheduler:" + new Date() + commandIssuedBy);
            } else {
                throw new ApiException("Failed to stop scheduler");
            }
        } else {
            return Response.status(Response.Status.BAD_REQUEST).build();
        }
    } catch (Exception e) {
        log.error("", e);
    }

    return Response.status(Response.Status.OK).build();
}
From source file:nl.nn.adapterframework.webcontrol.api.ShowConfiguration.java
@PUT
@RolesAllowed({ "IbisDataAdmin", "IbisAdmin", "IbisTester" })
@Path("/configurations/{configuration}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response reloadConfiguration(@PathParam("configuration") String configurationName,
        LinkedHashMap<String, Object> json) throws ApiException {
    initBase(servletConfig);

    Configuration configuration = ibisManager.getConfiguration(configurationName);
    if (configuration == null) {
        throw new ApiException("Configuration not found!");
    }

    Response.ResponseBuilder response = Response.status(Response.Status.NO_CONTENT); // PUT defaults to no content

    for (Entry<String, Object> entry : json.entrySet()) {
        String key = entry.getKey();
        Object value = entry.getValue();
        if (key.equalsIgnoreCase("action")) {
            if (value.equals("reload")) {
                ibisManager.handleAdapter("RELOAD", configurationName, "", "", null, false);
            }
            response.entity("{\"status\":\"ok\"}");
        }
    }

    return response.build();
}
From source file:gov.llnl.lc.smt.command.node.SmtNode.java
public static String getActivePortSummary(OSM_FabricDelta fabricDelta, OSM_Node n,
        LinkedHashMap<String, IB_Link> links) {
    OSM_FabricDeltaAnalyzer fda = new OSM_FabricDeltaAnalyzer(fabricDelta);
    OSM_Fabric fabric = fabricDelta.getFabric2();
    StringBuffer buff = new StringBuffer();
    LinkedHashMap<String, IB_Vertex> vertexMap = IB_Vertex.createVertexMap(fabric);
    IB_Vertex v = IB_Vertex.getVertex(IB_Vertex.getVertexKey(n.getNodeGuid()), vertexMap);

    int num_links = links.size();
    buff.append("(" + num_links + ") Active ports" + SmtConstants.NEW_LINE);

    // loop through the node's port numbers, and print out the active ports
    for (int pn = 0; pn < n.sbnNode.num_ports; pn++) {
        String pKey = OSM_Fabric.getOSM_PortKey(n.getNodeGuid().getGuid(), (short) (pn + 1));
        OSM_Port p = fabric.getOSM_Port(pKey);
        String errStr = fda.getPortErrorState(n.getNodeGuid(), pn + 1);

        // if the error string is blank, provide the up/down direction of the port
        if ((errStr == null) || (errStr.length() < 1))
            errStr = v.getPortDirection(pn + 1) + " link";

        // is this one of the active links? If so, display it
        for (Map.Entry<String, IB_Link> entry : links.entrySet()) {
            IB_Link l = entry.getValue();
            if (l.contains(p)) {
                // not one of the down ones
                buff.append(SmtNode.getLinkLine(p, l, fabric,
                        String.format(portFormatString, p.getPortNumber(), errStr)) + SmtConstants.NEW_LINE);
                break;
            }
        }
    }
    return buff.toString();
}
From source file:gov.llnl.lc.smt.command.fabric.SmtFabric.java
/**
 * Describe the method here
 *
 * @see gov.llnl.lc.smt.command.SmtCommand#doCommand(gov.llnl.lc.smt.command.config.SmtConfig)
 *
 * @param config
 * @return
 * @throws Exception
 ***********************************************************/
@Override
public boolean doCommand(SmtConfig config) throws Exception {
    // this is the fabric command
    // support obtaining the fabric on-line, or from an OMS or Fabric
    // file. Only one at a time....

    // which is all done by default within the execute() command of the
    // parent superclass smt-command

    // only one way of obtaining fabric data should be specified, but IF more
    // than one is, prefer;
    //
    // on-line (if host or port is specified)
    // OMS file
    // Fabric file
    // on-line using localhost and port 10011

    Map<String, String> map = smtConfig.getConfigMap();
    OSM_Configuration cfg = null;

    String subCommand = map.get(SmtProperty.SMT_SUBCOMMAND.getName());
    if (subCommand == null)
        subCommand = SmtProperty.SMT_HELP.getName();

    // there should only be one subcommand
    if (subCommand.equalsIgnoreCase(SmtProperty.SMT_FABRIC_DISCOVER.getName())) {
        showDiscoveredFabrics(map);
        return true;
    } else if (subCommand.equalsIgnoreCase(SmtProperty.SMT_FABRIC_CONFIG_CMD.getName())) {
        cfg = getOsmConfig(true);
        if ((cfg != null) && (cfg.getFabricConfig() != null) && (cfg.getFabricConfig().getFabricName() != null)) {
            // save this configuration and then perform a check
            OSM_Configuration.cacheOSM_Configuration(OMService.getFabricName(), cfg);
            System.out.println(cfg.getFabricConfig().toContent());
        } else {
            logger.severe("Couldn't obtain Fabric configuration, check service connection.");
            System.err.println("Couldn't obtain Fabric configuration, check service connection.");
        }
        return true;
    } else if (subCommand.equalsIgnoreCase(SmtProperty.SMT_STATUS.getName())) {
        System.out.println(getStatus(OMService));
    } else if (subCommand.equalsIgnoreCase(SmtProperty.SMT_NODE_MAP_CMD.getName())) {
        cfg = getOsmConfig(true);
        if ((cfg != null) && (cfg.getFabricConfig() != null) && (cfg.getFabricConfig().getFabricName() != null)) {
            // save this configuration and then perform a check
            OSM_Configuration.cacheOSM_Configuration(OMService.getFabricName(), cfg);
            System.out.println(cfg.getNodeNameMap().toContent());
        }
        return true;
    } else if (subCommand.equalsIgnoreCase(SmtProperty.SMT_QUERY_TYPE.getName())) {
        FabricQuery qType = FabricQuery.getByName(map.get(SmtProperty.SMT_QUERY_TYPE.getName()));

        if (qType == null) {
            logger.severe("Invalid SmtFabric query option");
            System.err.println("Invalid SmtFabric query option");
            subCommand = SmtProperty.SMT_HELP.getName();
            return false;
        }

        SmtFabricStructure fs = null;
        if (OMService != null)
            fs = new SmtFabricStructure(OMService);
        else
            System.err.println("The OMService is null, baby");

        switch (qType) {
        case FAB_LIST:
            System.out.println(FabricQuery.describeAllQueryTypes());
            break;

        case FAB_STATUS:
            System.out.println(getStatus(OMService));
            break;

        case FAB_STRUCTURE:
            System.out.println(fs.toStringAlternate());
            break;

        case FAB_SWITCHES:
            System.out.println(fs.toSwitchString());
            break;

        case FAB_HOSTS:
            System.out.println(fs.toHostString());
            break;

        case FAB_SERVICE:
            System.out.println(fs.toServiceString());
            break;

        case FAB_CHECK:
            // check for dynamic link errors AND configuration errors
            System.out.println("Checking for Link errors...");
            LinkedHashMap<String, String> errMap = IB_LinkInfo.getErrorLinkInfoRecords(OMService,
                    getOSM_FabricDelta(false));
            if ((errMap != null) && !(errMap.isEmpty()))
                for (Map.Entry<String, String> mapEntry : errMap.entrySet())
                    System.out.println(mapEntry.getValue());
            else
                System.out.println(" no errors found");
            System.out.println();

            cfg = getOsmConfig(true);
            if ((cfg != null) && (cfg.getFabricConfig() != null)
                    && (cfg.getFabricConfig().getFabricName() != null)) {
                // save this configuration and then perform a check
                OSM_Configuration.cacheOSM_Configuration(OMService.getFabricName(), cfg);
                OMService.getFabric().checkFabricStructure(cfg.getFabricConfig(), true);
            }
            break;

        case FAB_CONFIG:
            cfg = getOsmConfig(true);
            if ((cfg != null) && (cfg.getFabricConfig() != null)
                    && (cfg.getFabricConfig().getFabricName() != null)) {
                // save this configuration and then perform a check
                OSM_Configuration.cacheOSM_Configuration(OMService.getFabricName(), cfg);
                System.out.println(cfg.toInfo());
            }
            break;

        case FAB_WHATSUP:
            showWhatsUp(map);
            break;

        case FAB_ERRORS:
            OSM_FabricDelta fd = getOSM_FabricDelta(false);
            if (fd == null) {
                System.err.println(
                        "FabricDelta is null. Check service connection, or perhaps just wait for another snapshot");
                System.exit(0);
            }
            OSM_Fabric fabric = fd.getFabric2();
            OSM_FabricDeltaAnalyzer fda = new OSM_FabricDeltaAnalyzer(fd);
            LinkedHashMap<String, IB_Link> links = fabric.getIB_Links();
            System.out.println(getErrorNodeSummary(fda, links, false));
            break;

        case FAB_EVENTS:
            if (OMService != null)
                System.out.println(SmtEvent.getEventSummary(getOSM_FabricDelta(false), ""));
            else
                System.err.println("An OMS instance is required (connection or file)");
            break;

        case FAB_ROUTE:
            if (OMService != null) {
                RT_Table RoutingTable = RT_Table.buildRT_Table(OMService.getFabric());
                System.out.println(SmtRoute.getRouteTableSummary(OMService, RoutingTable));
            }
            break;

        default:
            // should never get here, because it will be trapped above
            System.err.println("Invalid SmtFabric query option, again");
            break;
        }
    } else if (OMService != null) {
        System.out.println(getStatus(OMService));
    }
    return true;
}
From source file:org.apache.hadoop.hive.ql.parse.NewGroupByUtils1.java
private ArrayList<GenericUDAFEvaluator> genAllGenericUDAFEvaluators(QB qb, String dest, RowResolver rowResolver)
        throws SemanticException {
    ArrayList<GenericUDAFEvaluator> genericUDAFEvaluators = new ArrayList<GenericUDAFEvaluator>();
    QBParseInfo parseInfo = qb.getParseInfo();
    LinkedHashMap<String, ASTNode> aggregationTrees = parseInfo.getAggregationExprsForClause(dest);
    for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
        ASTNode value = entry.getValue();
        String aggName = value.getChild(0).getText();

        ArrayList<exprNodeDesc> aggParameters = new ArrayList<exprNodeDesc>();
        for (int i = 1; i < value.getChildCount(); i++) {
            ASTNode paraExpr = (ASTNode) value.getChild(i);
            exprNodeDesc paraExprDesc = SemanticAnalyzer.genExprNodeDesc(paraExpr, rowResolver, qb, -1, conf);
            aggParameters.add(paraExprDesc);
        }

        boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI;
        boolean isAllColumns = value.getType() == HiveParser.TOK_FUNCTIONSTAR;

        GenericUDAFEvaluator genericUDAFEvaluator = SemanticAnalyzer.getGenericUDAFEvaluator(aggName,
                aggParameters, value, isDistinct, isAllColumns);
        genericUDAFEvaluators.add(genericUDAFEvaluator);
    }
    return genericUDAFEvaluators;
}