Usage examples for java.util.HashMap.remove
public V remove(Object key)
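remove returns the value previously associated with the key, or null if the map contained no mapping for it (a null return can also mean the key was explicitly mapped to null). Before the real-world examples below, a minimal standalone sketch of both outcomes:

    import java.util.HashMap;

    public class RemoveDemo {
        public static void main(String[] args) {
            HashMap<String, Integer> map = new HashMap<>();
            map.put("one", 1);

            Integer removed = map.remove("one"); // mapping existed: returns 1
            Integer missing = map.remove("two"); // no mapping: returns null

            System.out.println(removed);         // 1
            System.out.println(missing);         // null
            System.out.println(map.isEmpty());   // true
        }
    }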
From source file:org.apache.impala.analysis.ToSqlUtils.java
/** * Returns a "CREATE TABLE" or "CREATE VIEW" statement that creates the specified * table./*from w w w . j a v a 2 s . c om*/ */ public static String getCreateTableSql(Table table) throws CatalogException { Preconditions.checkNotNull(table); if (table instanceof View) return getCreateViewSql((View) table); org.apache.hadoop.hive.metastore.api.Table msTable = table.getMetaStoreTable(); HashMap<String, String> properties = Maps.newHashMap(msTable.getParameters()); if (properties.containsKey("transient_lastDdlTime")) { properties.remove("transient_lastDdlTime"); } boolean isExternal = msTable.getTableType() != null && msTable.getTableType().equals(TableType.EXTERNAL_TABLE.toString()); String comment = properties.get("comment"); for (String hiddenProperty : HIDDEN_TABLE_PROPERTIES) { properties.remove(hiddenProperty); } ArrayList<String> colsSql = Lists.newArrayList(); ArrayList<String> partitionColsSql = Lists.newArrayList(); boolean isHbaseTable = table instanceof HBaseTable; for (int i = 0; i < table.getColumns().size(); i++) { if (!isHbaseTable && i < table.getNumClusteringCols()) { partitionColsSql.add(columnToSql(table.getColumns().get(i))); } else { colsSql.add(columnToSql(table.getColumns().get(i))); } } RowFormat rowFormat = RowFormat.fromStorageDescriptor(msTable.getSd()); HdfsFileFormat format = HdfsFileFormat.fromHdfsInputFormatClass(msTable.getSd().getInputFormat()); HdfsCompression compression = HdfsCompression.fromHdfsInputFormatClass(msTable.getSd().getInputFormat()); String location = isHbaseTable ? null : msTable.getSd().getLocation(); Map<String, String> serdeParameters = msTable.getSd().getSerdeInfo().getParameters(); String storageHandlerClassName = table.getStorageHandlerClassName(); List<String> primaryKeySql = Lists.newArrayList(); String kuduPartitionByParams = null; if (table instanceof KuduTable) { KuduTable kuduTable = (KuduTable) table; // Kudu tables don't use LOCATION syntax location = null; format = HdfsFileFormat.KUDU; // Kudu tables cannot use the Hive DDL syntax for the storage handler storageHandlerClassName = null; properties.remove(KuduTable.KEY_STORAGE_HANDLER); String kuduTableName = properties.get(KuduTable.KEY_TABLE_NAME); Preconditions.checkNotNull(kuduTableName); if (kuduTableName .equals(KuduUtil.getDefaultCreateKuduTableName(table.getDb().getName(), table.getName()))) { properties.remove(KuduTable.KEY_TABLE_NAME); } // Internal property, should not be exposed to the user. properties.remove(StatsSetupConst.DO_NOT_UPDATE_STATS); if (!isExternal) { primaryKeySql.addAll(kuduTable.getPrimaryKeyColumnNames()); List<String> paramsSql = Lists.newArrayList(); for (KuduPartitionParam param : kuduTable.getPartitionBy()) { paramsSql.add(param.toSql()); } kuduPartitionByParams = Joiner.on(", ").join(paramsSql); } else { // We shouldn't output the columns for external tables colsSql = null; } } HdfsUri tableLocation = location == null ? null : new HdfsUri(location); return getCreateTableSql(table.getDb().getName(), table.getName(), comment, colsSql, partitionColsSql, primaryKeySql, kuduPartitionByParams, properties, serdeParameters, isExternal, false, rowFormat, format, compression, storageHandlerClassName, tableLocation); }
From source file:de.hbz.lobid.helper.CompareJsonMaps.java
private static void handleOrderedValues(final HashMap<String, String> actualMap,
        final Entry<String, String> e) {
    CompareJsonMaps.logger.debug("Test if proper order for: " + e.getKey());
    if (actualMap.containsKey(e.getKey())) {
        CompareJsonMaps.logger.trace("Existing as expected: " + e.getKey());
        if (e.getValue().equals(actualMap.get(e.getKey()))) {
            CompareJsonMaps.logger.trace("Equality:\n" + e.getValue() + "\n" + actualMap.get(e.getKey()));
            actualMap.remove(e.getKey());
        } else
            CompareJsonMaps.logger.debug("...but not equal! Will fail");
    } else {
        CompareJsonMaps.logger.warn("Missing: " + e.getKey() + ", will fail");
    }
}
From source file:com.cloudera.impala.analysis.ToSqlUtils.java
/** * Returns a "CREATE TABLE" statement that creates the specified table. *//* ww w . j a v a 2 s . com*/ public static String getCreateTableSql(Table table) throws CatalogException { Preconditions.checkNotNull(table); org.apache.hadoop.hive.metastore.api.Table msTable = table.getMetaStoreTable(); HashMap<String, String> properties = Maps.newHashMap(msTable.getParameters()); boolean isExternal = msTable.getTableType() != null && msTable.getTableType().equals(TableType.EXTERNAL_TABLE.toString()); String comment = properties.get("comment"); for (String hiddenProperty : HIDDEN_TABLE_PROPERTIES) { properties.remove(hiddenProperty); } ArrayList<String> colsSql = Lists.newArrayList(); ArrayList<String> partitionColsSql = Lists.newArrayList(); boolean isHbaseTable = table instanceof HBaseTable; for (int i = 0; i < table.getColumns().size(); i++) { if (!isHbaseTable && i < table.getNumClusteringCols()) { partitionColsSql.add(columnToSql(table.getColumns().get(i))); } else { colsSql.add(columnToSql(table.getColumns().get(i))); } } RowFormat rowFormat = RowFormat.fromStorageDescriptor(msTable.getSd()); HdfsFileFormat format = HdfsFileFormat.fromHdfsInputFormatClass(msTable.getSd().getInputFormat()); HdfsCompression compression = HdfsCompression.fromHdfsInputFormatClass(msTable.getSd().getInputFormat()); String location = isHbaseTable ? null : msTable.getSd().getLocation(); Map<String, String> serdeParameters = msTable.getSd().getSerdeInfo().getParameters(); return getCreateTableSql(table.getDb().getName(), table.getName(), comment, colsSql, partitionColsSql, properties, serdeParameters, isExternal, false, rowFormat, format, compression, table.getStorageHandlerClassName(), location); }
From source file:org.zaproxy.zap.extension.cmss.WebAppGuesser.java
public static ArrayList<String> fingerPrintFile(String appName)
        throws MalformedURLException, IOException, NoSuchAlgorithmException, DecoderException {
    ArrayList<String> versions = new ArrayList<String>();
    boolean stop = false;
    Document doc = loadFingerPrintingDB(appName2dbPath(appName));
    Element racine = doc.getRootElement();
    for (int i = 0; i < racine.getChildren().size(); i++) {
        Element file = (Element) racine.getChildren().get(i);
        String path = file.getAttributeValue("path");
        if (checkIfExist(urlToGuess, path)) {
            System.out.println("path that matches = " + path);
            // TODO: introduce options to specify the fingerprinting accuracy level
            for (int j = 0; j < file.getChildren().size(); j++) {
                Element hashNode = (Element) file.getChildren().get(j);
                String hash = hashNode.getAttributeValue("md5");
                // Concatenate the URL content and the file path into one byte
                // array, then compute its checksum. getFileFromUrl is assumed to
                // return an InputStream; its content is buffered fully so the
                // checksum covers the whole file.
                InputStream urlContent = CMSSUtils.getFileFromUrl(new URL(urlToGuess + path));
                ByteArrayOutputStream buffer = new ByteArrayOutputStream();
                byte[] chunk = new byte[8192];
                for (int n; (n = urlContent.read(chunk)) != -1;) {
                    buffer.write(chunk, 0, n);
                }
                byte[] octets1 = buffer.toByteArray();
                byte[] octets2 = path.getBytes(); // could be hoisted above the loop
                byte[] c = new byte[octets1.length + octets2.length];
                System.arraycopy(octets1, 0, c, 0, octets1.length);
                System.arraycopy(octets2, 0, c, octets1.length, octets2.length);
                String chksum = CMSSUtils.checksum(c);
                System.out.println("hash = " + hash);
                System.out.println("chksum = " + chksum);
                if (hash.compareTo(chksum) == 0) {
                    stop = true;
                    ArrayList<String> pathAssociatedVersions = new ArrayList<String>();
                    for (int k = 0; k < hashNode.getChildren().size(); k++) {
                        Element versionNode = (Element) hashNode.getChildren().get(k);
                        String version = versionNode.getValue();
                        version = version.substring(0, 3);
                        if (!pathAssociatedVersions.contains(version)) {
                            pathAssociatedVersions.add(version);
                        }
                        System.out.println(" version==" + version);
                    }
                    for (String app : pathAssociatedVersions) {
                        versions.add(app);
                    }
                    break; // a file on the net cannot have two different hashes
                }
            }
            if (stop) break; // TODO: should analyze all files
        }
    }
    // Count how many times each candidate version was matched.
    HashMap<String, Integer> calculList = new HashMap<String, Integer>();
    ArrayList<String> finalResult = new ArrayList<String>();
    for (String version : versions) {
        if (calculList.containsKey(version)) {
            int nbr = calculList.get(version);
            calculList.remove(version);
            calculList.put(version, nbr + 1);
        } else {
            calculList.put(version, 1);
        }
    }
    int max = 1;
    for (Entry<String, Integer> entry : calculList.entrySet()) {
        int occ = entry.getValue();
        if (occ > max) {
            max = occ;
        }
    }
    // Keep only the versions with the highest match count.
    for (Entry<String, Integer> entry : calculList.entrySet()) {
        if (entry.getValue() == max) {
            finalResult.add(entry.getKey());
        }
    }
    return finalResult;
}
From source file:org.fao.geonet.kernel.csw.services.GetDomain.java
/**
 * Adds each field value to the sorted set and tracks how many times
 * each value occurs.
 *
 * @param sortedValues
 * @param fieldValues
 * @param duplicateValues
 */
private static void addtoSortedSet(SortedSet<String> sortedValues, String[] fieldValues,
        HashMap<String, Integer> duplicateValues) {
    for (String value : fieldValues) {
        sortedValues.add(value);
        if (duplicateValues.containsKey(value)) {
            int nb = duplicateValues.get(value);
            duplicateValues.remove(value);
            duplicateValues.put(value, nb + 1);
        } else
            duplicateValues.put(value, 1);
    }
}
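The containsKey/remove/put sequence above rewrites each counter entry in three map operations. On Java 8 and later the same counting can be done in a single call with Map.merge; a minimal standalone sketch (not part of the GeoNetwork source):

    import java.util.HashMap;

    public class CountValues {
        public static void main(String[] args) {
            HashMap<String, Integer> duplicateValues = new HashMap<>();
            for (String value : new String[] { "a", "b", "a" }) {
                // Insert 1 for a new key, otherwise add 1 to the existing count.
                duplicateValues.merge(value, 1, Integer::sum);
            }
            System.out.println(duplicateValues); // {a=2, b=1} (order may vary)
        }
    }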
From source file:Main.java
@SuppressWarnings("unchecked")
public static LinkedHashMap<Object, Comparable> sortHashMapByValues(HashMap<Object, Comparable> passedMap) {
    ArrayList mapKeys = new ArrayList(passedMap.keySet());
    ArrayList mapValues = new ArrayList(passedMap.values());
    Collections.sort(mapValues);
    Collections.sort(mapKeys);
    LinkedHashMap<Object, Comparable> sortedMap = new LinkedHashMap<Object, Comparable>();
    Iterator<Comparable> valueIt = mapValues.iterator();
    while (valueIt.hasNext()) {
        Comparable val = valueIt.next();
        Iterator keyIt = mapKeys.iterator();
        while (keyIt.hasNext()) {
            Object key = keyIt.next();
            Comparable comp = passedMap.get(key);
            if (comp.equals(val)) {
                // Remove the matched entry so a duplicate value is paired
                // with a different key on the next pass.
                passedMap.remove(key);
                mapKeys.remove(key);
                sortedMap.put(key, val);
                break;
            }
        }
    }
    return sortedMap;
}
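The nested-iterator approach above runs in O(n²) and destructively empties passedMap as a side effect. For comparison, on Java 8+ a value-sorted copy can be built with streams without mutating the input; a sketch assuming the values are mutually comparable (Map.of needs Java 9+):

    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class SortByValue {
        static <K, V extends Comparable<V>> LinkedHashMap<K, V> sortByValues(Map<K, V> map) {
            return map.entrySet().stream()
                    .sorted(Map.Entry.comparingByValue())
                    .collect(Collectors.toMap(
                            Map.Entry::getKey, Map.Entry::getValue,
                            (a, b) -> a,           // merge function; keys are unique
                            LinkedHashMap::new));  // keep the sorted insertion order
        }

        public static void main(String[] args) {
            System.out.println(sortByValues(Map.of("b", 2, "a", 1, "c", 3))); // {a=1, b=2, c=3}
        }
    }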
From source file:org.rhq.enterprise.gui.legacy.util.SessionUtils.java
/**
 * Remove any old workflows.
 *
 * @param session
 * @param workflowName
 */
public static void clearWorkflow(HttpSession session, String workflowName) {
    HashMap workflows = (HashMap) session.getAttribute(AttrConstants.WORKFLOW_SES_ATTR);
    if (workflows != null) {
        workflows.remove(workflowName);
    }
}
From source file:org.rhq.plugins.database.ComponentTest.java
/**
 * Asserts that all measurements in the report are present
 * according to the resource descriptor.
 *
 * @see #getResourceDescriptor(String) for obtaining this.
 * @param report
 */
public static void assertAll(MeasurementReport report, ResourceDescriptor l) {
    HashMap<String, MetricDescriptor> map = new HashMap<String, MetricDescriptor>();
    for (MetricDescriptor md : l.getMetric()) {
        map.put(md.getProperty(), md);
    }
    for (MeasurementDataNumeric n : report.getNumericData()) {
        map.remove(n.getName());
    }
    for (MeasurementDataTrait n : report.getTraitData()) {
        map.remove(n.getName());
    }
    assertTrue("Measurements not found " + map.keySet(), map.isEmpty());
}
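The test above uses remove as a checklist: each reported metric crosses its descriptor off the map, so whatever survives is a metric that was expected but never reported. The same pattern in miniature (hypothetical names, just to show the shape):

    import java.util.HashMap;

    public class ChecklistDemo {
        public static void main(String[] args) {
            HashMap<String, String> expected = new HashMap<>();
            expected.put("cpu", "CPU usage");
            expected.put("mem", "Memory usage");

            // Cross off everything that was actually reported.
            for (String name : new String[] { "cpu" }) {
                expected.remove(name);
            }

            // Anything left was expected but never reported.
            System.out.println("Missing: " + expected.keySet()); // Missing: [mem]
        }
    }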
From source file:com.clustercontrol.hinemosagent.util.AgentConnectUtil.java
public static void deleteAgent(String facilityId, AgentInfo agentInfo) {
    try {
        _agentCacheLock.writeLock();
        HashMap<String, AgentInfo> agentMap = getAgentCache();
        agentMap.remove(facilityId);
        storeAgentCache(agentMap);
    } finally {
        _agentCacheLock.writeUnlock();
    }
    try {
        _agentLibMd5CacheLock.writeLock();
        HashMap<String, Map<String, String>> agentLibMd5 = getAgentLibMd5Cache();
        agentLibMd5.remove(facilityId);
        storeAgentLibMd5Cache(agentLibMd5);
    } finally {
        _agentLibMd5CacheLock.writeUnlock();
    }
    // End any job session nodes that were running on this agent.
    JobSessionNodeImpl nodeImple = new JobSessionNodeImpl();
    nodeImple.endNodeByAgent(facilityId, agentInfo, true);
}
From source file:com.clustercontrol.jobmanagement.factory.FullJob.java
public static void removeCache(String jobunitId) {
    m_log.debug("removeCache " + jobunitId);
    try {
        _lock.writeLock();
        HashMap<String, Map<String, JobInfo>> jobInfoCache = getJobInfoCache();
        if (jobInfoCache.remove(jobunitId) != null) {
            storeJobInfoCache(jobInfoCache);
        }
        HashMap<String, Map<String, JobMstEntity>> jobMstCache = getJobMstCache();
        if (jobMstCache.remove(jobunitId) != null) {
            storeJobMstCache(jobMstCache);
        }
    } finally {
        _lock.writeUnlock();
    }
}