List of usage examples for java.util.Set.iterator()
Iterator<E> iterator();
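Returns an iterator over the elements in this set; the elements are returned in no particular order unless the implementation provides an ordering guarantee. Before the source-file examples, here is a minimal self-contained sketch of the two core patterns they rely on, explicit hasNext()/next() traversal and safe removal during iteration (class and element names are invented for illustration):

import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Set;

public class SetIteratorDemo {
    public static void main(String[] args) {
        // LinkedHashSet keeps insertion order, making the output predictable
        Set<String> names = new LinkedHashSet<>();
        names.add("alpha");
        names.add("beta");
        names.add("gamma");

        Iterator<String> it = names.iterator();
        while (it.hasNext()) {
            String name = it.next();
            if (name.startsWith("b")) {
                // Iterator.remove() is the only safe way to drop an
                // element from the set while iterating over it
                it.remove();
            }
        }
        System.out.println(names); // prints [alpha, gamma]
    }
}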
From source file:cascading.flow.hadoop.util.HadoopUtil.java
public static void addComparators(Configuration conf, String property, Map<String, Fields> map,
        BaseFlowStep flowStep, Group group) {
    Iterator<Fields> fieldsIterator = map.values().iterator();

    if (!fieldsIterator.hasNext())
        return;

    Fields fields = fieldsIterator.next();

    if (fields.hasComparators()) {
        conf.set(property, pack(fields, conf));
        return;
    }

    // use resolved fields if there are no comparators
    Set<Scope> previousScopes = flowStep.getPreviousScopes(group);

    fields = previousScopes.iterator().next().getOutValuesFields();

    if (fields.size() != 0) // allows fields.UNKNOWN to be used
        conf.setInt(property + ".size", fields.size());
}
From source file:de.tu_dortmund.ub.data.dswarm.TaskProcessingUnit.java
private static void executeTPUPartsOnDemand(final Optional<Boolean> optionalDoInit,
        final Optional<Boolean> optionalAllowMultipleDataModels, String[] watchFolderFiles,
        final String resourceWatchFolder, final Optional<String> optionalOutputDataModelID,
        final String serviceName, final Integer engineThreads,
        final Optional<Boolean> optionalDoTransformations, final Optional<Boolean> optionalDoIngestOnTheFly,
        final Optional<Boolean> optionalDoExportOnTheFly, final Optional<String> optionalExportMimeType,
        final Optional<String> optionalExportFileExtension, final Properties config) throws Exception {

    // keys = input data models; values = related data resources
    final Map<String, Triple<String, String, String>> inputDataModelsAndResources = new HashMap<>();

    // init
    if (optionalDoInit.isPresent() && optionalDoInit.get()) {
        if (optionalAllowMultipleDataModels.isPresent() && optionalAllowMultipleDataModels.get()) {
            for (int i = 0; i < watchFolderFiles.length; i++) {
                final String initResourceFileName = watchFolderFiles[i];
                doInit(resourceWatchFolder, initResourceFileName, serviceName, engineThreads, config,
                        inputDataModelsAndResources);
                // remove the file already processed during init from the files list to avoid duplicates
                watchFolderFiles = ArrayUtils.removeElement(watchFolderFiles, initResourceFileName);
            }
        } else {
            // use the first file in the folder for init
            final String initResourceFileName = watchFolderFiles[0];
            doInit(resourceWatchFolder, initResourceFileName, serviceName, engineThreads, config,
                    inputDataModelsAndResources);
            // remove the file already processed during init from the files list to avoid duplicates
            watchFolderFiles = ArrayUtils.removeElement(watchFolderFiles, initResourceFileName);
        }
    } else {
        final String inputDataModelID = config.getProperty(TPUStatics.PROTOTYPE_INPUT_DATA_MODEL_ID_IDENTIFIER);
        final String resourceID = config.getProperty(TPUStatics.PROTOTYPE_RESOURCE_ID_INDENTIFIER);
        inputDataModelsAndResources.put(inputDataModelID, Triple.of(inputDataModelID, resourceID, null));
        LOG.info("skip init part");
    }

    final Optional<Boolean> optionalDoIngest = TPUUtil.getBooleanConfigValue(TPUStatics.DO_INGEST_IDENTIFIER, config);

    // ingest
    if (optionalDoIngest.isPresent() && optionalDoIngest.get()) {
        final String projectName = config.getProperty(TPUStatics.PROJECT_NAME_IDENTIFIER);
        if (!optionalAllowMultipleDataModels.isPresent() || !optionalAllowMultipleDataModels.get()) {
            final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources.entrySet();
            final Iterator<Map.Entry<String, Triple<String, String, String>>> iterator = entries.iterator();
            final Map.Entry<String, Triple<String, String, String>> entry = iterator.next();
            final String inputDataModelID = entry.getKey();
            final Triple<String, String, String> triple = entry.getValue();
            final String resourceID = triple.getMiddle();
            executeIngests(watchFolderFiles, inputDataModelID, resourceID, projectName, serviceName,
                    engineThreads, config);
        }
    } else {
        LOG.info("skip ingest");
    }

    if (!optionalOutputDataModelID.isPresent()) {
        throw new Exception("please set an output data model ('prototype.outputDataModelID') for this TPU task");
    }

    final String outputDataModelID = optionalOutputDataModelID.get();

    // task execution
    if (optionalDoTransformations.isPresent() && optionalDoTransformations.get()) {
        if (optionalAllowMultipleDataModels.isPresent() && optionalAllowMultipleDataModels.get()) {
            final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources.entrySet();
            for (final Map.Entry<String, Triple<String, String, String>> entry : entries) {
                final String inputDataModelID = entry.getKey();
                executeTransform(inputDataModelID, outputDataModelID, optionalDoIngestOnTheFly,
                        optionalDoExportOnTheFly, optionalExportMimeType, optionalExportFileExtension,
                        engineThreads, serviceName, config);
            }
        } else {
            final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources.entrySet();
            final Iterator<Map.Entry<String, Triple<String, String, String>>> iterator = entries.iterator();
            final Map.Entry<String, Triple<String, String, String>> entry = iterator.next();
            final String inputDataModelID = entry.getKey();
            executeTransform(inputDataModelID, outputDataModelID, optionalDoIngestOnTheFly,
                    optionalDoExportOnTheFly, optionalExportMimeType, optionalExportFileExtension,
                    engineThreads, serviceName, config);
        }
    } else {
        LOG.info("skip transformations");
    }

    final Optional<Boolean> optionalDoExport = TPUUtil.getBooleanConfigValue(TPUStatics.DO_EXPORT_IDENTIFIER, config);

    // export
    if (optionalDoExport.isPresent() && optionalDoExport.get()) {
        if (!optionalAllowMultipleDataModels.isPresent() || !optionalAllowMultipleDataModels.get()) {
            final String exportDataModelID;
            if (outputDataModelID != null && !outputDataModelID.trim().isEmpty()) {
                exportDataModelID = outputDataModelID;
            } else {
                final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources.entrySet();
                final Iterator<Map.Entry<String, Triple<String, String, String>>> iterator = entries.iterator();
                final Map.Entry<String, Triple<String, String, String>> entry = iterator.next();
                exportDataModelID = entry.getKey();
            }
            executeExport(exportDataModelID, optionalExportMimeType, optionalExportFileExtension, engineThreads,
                    serviceName, config);
        }
    } else {
        LOG.info("skip export");
    }

    // clean-up
    int cnt = 0;
    final String engineDswarmAPI = config.getProperty(TPUStatics.ENGINE_DSWARM_API_IDENTIFIER);
    final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources.entrySet();
    for (final Map.Entry<String, Triple<String, String, String>> entry : entries) {
        final Triple<String, String, String> triple = entry.getValue();
        final String inputDataModelId = triple.getLeft();
        final String resourceId = triple.getMiddle();
        final String configurationId = triple.getRight();
        TPUUtil.deleteObject(inputDataModelId, DswarmBackendStatics.DATAMODELS_ENDPOINT, serviceName,
                engineDswarmAPI, cnt);
        TPUUtil.deleteObject(resourceId, DswarmBackendStatics.RESOURCES_ENDPOINT, serviceName, engineDswarmAPI, cnt);
        TPUUtil.deleteObject(configurationId, DswarmBackendStatics.CONFIGURATIONS_ENDPOINT, serviceName,
                engineDswarmAPI, cnt);
        cnt++;
    }
}
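The entrySet().iterator().next() sequence appears three times above to pull the single expected entry out of inputDataModelsAndResources. When a map is known to hold exactly one entry, that pattern could be factored into a small helper along these lines (singleEntry is a hypothetical name, not part of the TPU code):

// hypothetical helper: fetch the sole entry of a map, failing fast otherwise
private static <K, V> Map.Entry<K, V> singleEntry(final Map<K, V> map) {
    final Iterator<Map.Entry<K, V>> iterator = map.entrySet().iterator();
    final Map.Entry<K, V> entry = iterator.next(); // throws NoSuchElementException on an empty map
    if (iterator.hasNext()) {
        throw new IllegalStateException("expected exactly one entry, found more");
    }
    return entry;
}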
From source file:com.joyent.manta.util.MantaUtils.java
/**
 * Naively converts a map to a single CSV string. Warning: this doesn't
 * escape.
 *
 * @param map map with objects with implemented toString methods
 * @return CSV string or empty string
 */
public static String csv(final Map<?, ?> map) {
    Validate.notNull(map, "Map must not be null");

    final StringBuilder builder = new StringBuilder();

    /* We do this contorted type conversion because of Java's generics. */
    @SuppressWarnings("rawtypes")
    final Map noGenericsMap = map;
    @SuppressWarnings({ "rawtypes", "unchecked" })
    final Set<Map.Entry<?, ?>> set = noGenericsMap.entrySet();

    final Iterator<Map.Entry<?, ?>> itr = set.iterator();

    while (itr.hasNext()) {
        Map.Entry<?, ?> entry = itr.next();

        if (entry == null || entry.getKey() == null) {
            continue;
        }

        builder.append(entry.getKey().toString()).append(": ").append(String.valueOf(entry.getValue()));

        // only append a separator when another entry follows
        if (itr.hasNext()) {
            builder.append(", ");
        }
    }

    return builder.toString();
}
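A hypothetical call illustrating the joining behavior above (the map contents are invented; LinkedHashMap is used so the iteration order, and therefore the output, is stable):

import java.util.LinkedHashMap;
import java.util.Map;
import com.joyent.manta.util.MantaUtils;

public class CsvDemo {
    public static void main(String[] args) {
        Map<String, Object> headers = new LinkedHashMap<>();
        headers.put("durability-level", 3);
        headers.put("content-type", "application/json");
        // prints: durability-level: 3, content-type: application/json
        System.out.println(MantaUtils.csv(headers));
    }
}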
From source file:xqpark.ParkApi.java
/**
 * Builds the signature string for a request URL: parses the query string
 * into a TreeMap (so keys are sorted alphabetically), adds the app id and
 * timestamp, then concatenates all values followed by the secret key; the
 * result is meant to be MD5-hashed.
 *
 * @param urlStr the query string to sign, as "key=value" pairs joined by "&"
 * @return the concatenated string to be hashed
 * @throws UnsupportedEncodingException
 */
public static String getHashCode(String urlStr) throws UnsupportedEncodingException {
    String hashStr = "";
    Map<String, String> keyMap = new TreeMap<String, String>();
    time = Long.toString(System.currentTimeMillis());
    String[] strarray = urlStr.split("&");
    for (String temp : strarray) {
        if (temp.split("=").length == 2) {
            String value = temp.split("=")[1];
            keyMap.put(temp.split("=")[0], value);
        }
    }
    keyMap.put("jhappid", jhappid);
    keyMap.put("time", time);
    // a TreeMap's keySet iterator yields the keys in ascending order
    Set<String> keySet = keyMap.keySet();
    Iterator<String> iter = keySet.iterator();
    while (iter.hasNext()) {
        String mapKey = iter.next();
        hashStr += keyMap.get(mapKey);
    }
    // "key" is the class field holding the secret key (distinct from the
    // map keys iterated above)
    hashStr += key;
    return hashStr;
}
From source file:com.netspective.sparx.console.panel.data.schema.SchemaTablesPanel.java
public static List createStructureRows(Schemas schemas) {
    List rows = (List) rowsCache.get(schemas);
    if (rows != null)
        return rows;

    rows = new ArrayList();
    for (int i = 0; i < schemas.size(); i++) {
        Schema schema = schemas.get(i);
        Schema.TableTree tree = schema.getStructure();

        StructureRow schemaRow = new StructureRow(0, null, "Schema: '" + schema.getName() + "'");
        rows.add(schemaRow);

        StructureRow appTablesRow = new StructureRow(1, schemaRow, "Application Tables");
        rows.add(appTablesRow);

        List appTableAncestors = new ArrayList();
        appTableAncestors.add(schemaRow);
        appTableAncestors.add(appTablesRow);

        List children = tree.getChildren();
        for (int c = 0; c < children.size(); c++)
            addStructurRow(rows, 2, (Schema.TableTreeNode) children.get(c), appTableAncestors);

        StructureRow variantRecTypeTablesRow = new StructureRow(1, schemaRow, "Variant Record Type Tables");
        rows.add(variantRecTypeTablesRow);

        List variantRecTypeTableAncestors = new ArrayList();
        variantRecTypeTableAncestors.add(schemaRow);
        variantRecTypeTableAncestors.add(variantRecTypeTablesRow);

        Set sortedVariantRecTypeTables = new TreeSet(BasicSchema.TABLE_COMPARATOR);
        Tables tables = schema.getTables();
        for (int c = 0; c < tables.size(); c++) {
            Table table = tables.get(c);
            if (table instanceof EntityVariantRecordTypeTable)
                sortedVariantRecTypeTables.add(table);
        }
        for (Iterator iter = sortedVariantRecTypeTables.iterator(); iter.hasNext();)
            rows.add(new StructureRow(2, (EntityVariantRecordTypeTable) iter.next(), variantRecTypeTableAncestors));

        StructureRow enumTablesRow = new StructureRow(1, schemaRow, "Enumeration Tables");
        rows.add(enumTablesRow);

        List enumTableAncestors = new ArrayList();
        enumTableAncestors.add(schemaRow);
        enumTableAncestors.add(enumTablesRow);

        Set sortedEnumTables = new TreeSet(BasicSchema.TABLE_COMPARATOR);
        tables = schema.getTables();
        for (int c = 0; c < tables.size(); c++) {
            Table table = tables.get(c);
            if (table instanceof EnumerationTable)
                sortedEnumTables.add(table);
        }
        for (Iterator iter = sortedEnumTables.iterator(); iter.hasNext();)
            rows.add(new StructureRow(2, (EnumerationTable) iter.next(), enumTableAncestors));
    }

    rowsCache.put(schemas, rows);
    return rows;
}
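The sorted iteration used twice above, a TreeSet built with a Comparator whose iterator then yields elements in comparator order, can be seen in isolation in this sketch (the comparator and element values are invented):

import java.util.Comparator;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;

public class SortedSetIterationDemo {
    public static void main(String[] args) {
        // case-insensitive ordering, analogous to a table-name comparator
        Set<String> tables = new TreeSet<>(Comparator.comparing(String::toLowerCase));
        tables.add("Person");
        tables.add("account");
        tables.add("Invoice");

        // TreeSet's iterator walks the elements in comparator order:
        // account, Invoice, Person
        for (Iterator<String> iter = tables.iterator(); iter.hasNext();) {
            System.out.println(iter.next());
        }
    }
}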
From source file:it.geosolutions.imageio.plugins.nitronitf.NITFImageWriter.java
/**
 * Set a new Tagged Record Extension on top of the specified fields map.
 *
 * @param treName the name of the TRE to be setup
 * @param fieldsMap the map of fields <key,value> pairs
 * @return the populated TRE
 * @throws NITFException
 */
private static TRE setTRE(String treName, Map<String, String> fieldsMap) throws NITFException {
    TRE tre = new TRE(treName);
    Set<String> keysSet = fieldsMap.keySet();
    Iterator<String> keys = keysSet.iterator();
    while (keys.hasNext()) {
        String key = keys.next();
        String value = fieldsMap.get(key);
        if (key.contains("[") && key.contains("]")) {
            // Fields involved in LOOPS require special management.
            // Getting the field from the TRE won't work, therefore
            // we need to directly set it
            NITFUtilities.setTREFieldDirect(tre, key, value);
        } else {
            NITFUtilities.setTREField(tre, key, value, true);
        }
    }
    return tre;
}
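Since the loop reads both the key and its value, iterating the entry set would avoid the extra get() lookup per key. A minimal sketch of that variant, keeping the same loop-field branching and the NITFUtilities calls from the original:

private static TRE setTRE(String treName, Map<String, String> fieldsMap) throws NITFException {
    TRE tre = new TRE(treName);
    // entrySet() hands back key and value together, avoiding a get() per key
    Iterator<Map.Entry<String, String>> entries = fieldsMap.entrySet().iterator();
    while (entries.hasNext()) {
        Map.Entry<String, String> entry = entries.next();
        String key = entry.getKey();
        String value = entry.getValue();
        if (key.contains("[") && key.contains("]")) {
            NITFUtilities.setTREFieldDirect(tre, key, value);
        } else {
            NITFUtilities.setTREField(tre, key, value, true);
        }
    }
    return tre;
}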
From source file:com.liferay.portal.action.LoginAction.java
public static void addSessionAttributes(HttpSession ses, HashMap map) {
    if (map == null)
        return;

    Set keys = map.keySet();
    if (keys == null)
        return;

    Iterator iter = keys.iterator();
    while (iter.hasNext()) {
        String name = (String) iter.next();
        ses.setAttribute(name, map.get(name));
    }
}
From source file:it.geosolutions.imageio.plugins.nitronitf.NITFImageWriter.java
/**
 * Populates the extended section of the given image subheader with the
 * TREs carried by the image wrapper, adding a J2KLRA TRE when the image
 * is going to be JP2 compressed.
 *
 * @param subheader the image subheader to be populated
 * @param wrapper the image wrapper carrying the TREs map
 * @param compression the requested write compression
 * @param bpppb bits per pixel per band
 * @throws NITFException
 */
private static void initTREs(final ImageSubheader subheader, final ImageWrapper wrapper,
        final WriteCompression compression, final double bpppb) throws NITFException {
    Extensions extendedSection = subheader.getExtendedSection();
    final boolean isSingleBand = wrapper.getImage().getSampleModel().getNumBands() == 1;
    final Map<String, Map<String, String>> tresMap = wrapper.getTres();
    if (tresMap != null && !tresMap.isEmpty()) {
        if (LOGGER.isLoggable(Level.FINE)) {
            LOGGER.log(Level.FINE, "Populating TRE");
        }
        Set<String> keys = tresMap.keySet();
        Iterator<String> it = keys.iterator();
        while (it.hasNext()) {
            String treName = it.next();
            Map<String, String> fieldsMapping = tresMap.get(treName);
            TRE tre = setTRE(treName, fieldsMapping);
            extendedSection.appendTRE(tre);
        }
    }
    if (compression != WriteCompression.UNCOMPRESSED) {
        // Setting the J2KLRA TRE in case the image needs to be jp2 compressed
        setJ2KLRA(extendedSection, compression, isSingleBand, bpppb);
    }
}
From source file:com.cloudera.whirr.cm.CmServerClusterInstance.java
public static Map<String, String> getDeviceMappings(ClusterSpec specification, Set<Instance> instances) {
    Map<String, String> deviceMappings = new HashMap<String, String>();
    if (specification != null && instances != null && !instances.isEmpty()) {
        deviceMappings.putAll(new VolumeManager().getDeviceMappings(specification, instances.iterator().next()));
    }
    return deviceMappings;
}
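instances.iterator().next() is the idiomatic way to pull one (arbitrary) element out of a Set, as done above; next() on an empty set throws NoSuchElementException, which is why the isEmpty() guard matters. A standalone sketch with invented values:

import java.util.HashSet;
import java.util.Set;

public class FirstElementDemo {
    public static void main(String[] args) {
        Set<Integer> ids = new HashSet<>();
        ids.add(42);

        // guard before calling next(): an empty set would throw
        if (!ids.isEmpty()) {
            Integer anyId = ids.iterator().next(); // arbitrary element; HashSet has no order
            System.out.println(anyId);
        }
    }
}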
From source file:com.google.gsa.valve.modules.krb.KerberosAuthenticationProcess.java
/**
 * Gets the main principal from the user subject obtained as a result
 * of the Kerberos authentication process
 *
 * @param subject user subject
 *
 * @return the user principal
 */
public static String getPrincipalStr(Subject subject) {
    String principal = null;
    logger.debug("Getting principal from Subject");
    try {
        Set principals = subject.getPrincipals();
        if (!principals.isEmpty()) {
            logger.debug("Subject contains at least one Principal");
            Iterator it = principals.iterator();
            if (it.hasNext()) {
                Principal ppal = (Principal) it.next();
                // strip the Kerberos realm ("user@REALM" -> "user"); note this
                // throws if the name contains no '@', which the catch below absorbs
                principal = ppal.getName().substring(0, ppal.getName().indexOf("@"));
                logger.debug("Getting the first principal: " + principal);
            }
        }
    } catch (Exception e) {
        logger.error("Error retrieving the client's Principal from the Subject: " + e.getMessage(), e);
    }
    return principal;
}
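javax.security.auth.Subject also offers a typed view, getPrincipals(Class), which returns a filtered Set and removes the need for the raw-type cast. A sketch of the same lookup using it (class and method names here are invented for illustration, not part of the valve code):

import java.security.Principal;
import java.util.Set;
import javax.security.auth.Subject;

public class PrincipalDemo {
    // same lookup via the typed view; no raw Iterator cast needed
    public static String getPrincipalName(Subject subject) {
        Set<Principal> principals = subject.getPrincipals(Principal.class);
        for (Principal ppal : principals) {
            String name = ppal.getName();
            int at = name.indexOf('@');
            // strip the Kerberos realm when present ("user@REALM" -> "user")
            return at >= 0 ? name.substring(0, at) : name;
        }
        return null;
    }
}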