Example usage for java.util HashMap values

List of usage examples for java.util HashMap values

Introduction

On this page you can find example usages of java.util HashMap.values().

Prototype

public Collection<V> values() 

Document

Returns a Collection view of the values contained in this map.
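
The returned collection is a live view backed by the map: removing elements through it removes the corresponding entries, newly added map entries show up in it, and element addition (add/addAll) is not supported. A minimal, self-contained sketch of that behaviour (class and variable names are illustrative, not part of the JDK documentation):

import java.util.Collection;
import java.util.HashMap;

public class ValuesViewDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> map = new HashMap<>();
        map.put("a", 1);
        map.put("b", 2);

        Collection<Integer> values = map.values(); // a live view, not a copy
        values.remove(2);                          // also removes the entry "b" from the map
        System.out.println(map);                   // {a=1}

        map.put("c", 3);
        System.out.println(values);                // the view reflects the new entry
        // values.add(4);                          // would throw UnsupportedOperationException
    }
}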

Usage

From source file:net.evecom.androidecssp.activity.taskresponse.TaskListActivity.java

private void initlist() {
    new Thread(new Runnable() {
        @Override
        public void run() {
            Message message = new Message();

            try {
                HashMap<String, String> hashMap = new HashMap<String, String>();
                String url = "";
                if (!ifmytask) {
                    hashMap.put("eventId", eventInfo.get("id").toString());
                    hashMap.put("projectId", projectInfo.get("id").toString());
                    url = "jfs/ecssp/mobile/taskresponseCtr/getTaskByEventIdAndProjectId";
                } else {
                    hashMap.put("deptid", ShareUtil.getString(instance, "PASSNAME", "orgid", ""));
                    hashMap.put("userid", ShareUtil.getString(instance, "PASSNAME", "userid", ""));
                    url = "jfs/ecssp/mobile/taskresponseCtr/getTaskByDeptIdAndUserId";
                }
                System.out.println(hashMap.values().toArray().toString());
                resutArray = connServerForResultPost(url, hashMap);
            } catch (ClientProtocolException e) {
                message.what = MESSAGETYPE_02;
                Log.e("mars", e.getMessage());
            } catch (IOException e) {
                message.what = MESSAGETYPE_02;
                Log.e("mars", e.getMessage());
            }
            if (resutArray.length() > 0) {
                try {
                    taskInfos = getObjsInfo(resutArray);
                    if (null == taskInfos) {
                        message.what = MESSAGETYPE_02;
                    } else {
                        message.what = MESSAGETYPE_01;
                    }
                } catch (JSONException e) {
                    message.what = MESSAGETYPE_02;
                    Log.e("mars", e.getMessage());
                }
            } else {
                message.what = MESSAGETYPE_02;
            }
            Log.v("mars", resutArray);
            eventListHandler.sendMessage(message);

        }
    }).start();

}
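
Note that hashMap.values().toArray().toString() above prints the array's default identity string (something like [Ljava.lang.Object;@1a2b3c), not the map's values. If the intent is to log the values, two straightforward alternatives, reusing the hashMap variable from the snippet:

// Collection.toString already lists the elements:
System.out.println(hashMap.values());

// Or format the array explicitly:
System.out.println(java.util.Arrays.toString(hashMap.values().toArray()));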

From source file:org.apache.axis2.deployment.util.Utils.java

/**
 * Modules can contain services in some cases.  This method will deploy all the services
 * for a given AxisModule into the current AxisConfiguration.
 * <p>
 * The code looks for an "aars/" directory inside the module (either .mar or exploded),
 * and an "aars.list" file inside that to figure out which services to deploy.  Note that all
 * services deployed this way will have access to the Module's classes.
 * </p>
 *
 * @param module the AxisModule to search for services
 * @param configCtx ConfigurationContext in which to deploy
 */

public static void deployModuleServices(AxisModule module, ConfigurationContext configCtx) throws AxisFault {
    try {
        AxisConfiguration axisConfig = configCtx.getAxisConfiguration();
        ArchiveReader archiveReader = new ArchiveReader();
        PhasesInfo phasesInfo = axisConfig.getPhasesInfo();
        final ClassLoader moduleClassLoader = module.getModuleClassLoader();
        ArrayList services = new ArrayList();
        final InputStream in = (InputStream) org.apache.axis2.java.security.AccessController
                .doPrivileged(new PrivilegedAction() {
                    public Object run() {
                        return moduleClassLoader.getResourceAsStream("aars/aars.list");
                    }
                });
        if (in != null) {
            BufferedReader input;
            try {
                input = new BufferedReader((InputStreamReader) org.apache.axis2.java.security.AccessController
                        .doPrivileged(new PrivilegedAction() {
                            public Object run() {
                                return new InputStreamReader(in);
                            }
                        }));
                String line;
                while ((line = input.readLine()) != null) {
                    line = line.trim();
                    if (line.length() > 0 && line.charAt(0) != '#') {
                        services.add(line);
                    }
                }
                input.close();
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
        if (services.size() > 0) {
            for (Object service1 : services) {
                final String servicename = (String) service1;
                if (servicename == null || "".equals(servicename)) {
                    continue;
                }
                InputStream fin = (InputStream) org.apache.axis2.java.security.AccessController
                        .doPrivileged(new PrivilegedAction() {
                            public Object run() {
                                return moduleClassLoader.getResourceAsStream("aars/" + servicename);
                            }
                        });
                if (fin == null) {
                    throw new AxisFault("No service archive found : " + servicename);
                }
                File inputFile = Utils.createTempFile(servicename, fin,
                        (File) axisConfig.getParameterValue(Constants.Configuration.ARTIFACTS_TEMP_DIR));
                DeploymentFileData filedata = new DeploymentFileData(inputFile);

                filedata.setClassLoader(false, moduleClassLoader,
                        (File) axisConfig.getParameterValue(Constants.Configuration.ARTIFACTS_TEMP_DIR),
                        axisConfig.isChildFirstClassLoading());
                HashMap wsdlservice = archiveReader.processWSDLs(filedata);
                if (wsdlservice != null && wsdlservice.size() > 0) {
                    Iterator servicesitr = wsdlservice.values().iterator();
                    while (servicesitr.hasNext()) {
                        AxisService service = (AxisService) servicesitr.next();
                        Iterator operations = service.getOperations();
                        while (operations.hasNext()) {
                            AxisOperation axisOperation = (AxisOperation) operations.next();
                            phasesInfo.setOperationPhases(axisOperation);
                        }
                    }
                }
                AxisServiceGroup serviceGroup = new AxisServiceGroup(axisConfig);
                serviceGroup.setServiceGroupClassLoader(filedata.getClassLoader());
                ArrayList serviceList = archiveReader.processServiceGroup(filedata.getAbsolutePath(), filedata,
                        serviceGroup, false, wsdlservice, configCtx);
                for (Object aServiceList : serviceList) {
                    AxisService axisService = (AxisService) aServiceList;
                    Parameter moduleService = new Parameter();
                    moduleService.setValue("true");
                    moduleService.setName(AxisModule.MODULE_SERVICE);
                    axisService.addParameter(moduleService);
                    serviceGroup.addService(axisService);
                }
                axisConfig.addServiceGroup(serviceGroup);
                fin.close();
            }
        }
    } catch (IOException e) {
        throw AxisFault.makeFault(e);
    }
}
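
archiveReader.processWSDLs returns a raw HashMap here, which is why the loop over wsdlservice.values() needs the (AxisService) cast. A hypothetical typed variant of the same traversal, shown only to illustrate how generics remove the cast:

// Hypothetical parameterized map; the actual Axis2 API above returns a raw HashMap.
HashMap<String, AxisService> wsdlServices = new HashMap<String, AxisService>();
for (AxisService service : wsdlServices.values()) {
    // no cast needed: the compiler knows each value is an AxisService
}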

From source file:org.apache.carbondata.spark.partition.api.impl.CSVFilePartitioner.java

private void partitionData(String targetFolder, List<String> nodes, int partitionCount,
        String[] partitionColumn, String[] headerColumns, HashMap<Partition, CSVWriter> outputStreamsMap,
        CSVReader dataInputStream, long recordCounter, String fileName, int[] indexes, String fileAbsolutePath)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException, IOException {
    DataPartitioner dataPartitioner = getDataPartitioner(targetFolder, nodes, partitionCount, partitionColumn,
            headerColumns);

    //Get partitions and create output streams
    List<Partition> allPartitions = dataPartitioner.getAllPartitions();

    loopPartitionsAndPopulateOutStreamMap(outputStreamsMap, fileName, allPartitions);

    //Write header in all the target files
    for (CSVWriter dataOutStream : outputStreamsMap.values()) {
        dataOutStream.writeNext(pruneColumns(headerColumns, indexes));
    }

    recordCounter = writeTargetStream(outputStreamsMap, dataInputStream, recordCounter, indexes,
            dataPartitioner, headerColumns, fileAbsolutePath);

    LOGGER.info("Processed Record count: " + recordCounter);
}
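
The for-each loop over outputStreamsMap.values() is the usual pattern when every value needs the same treatment and the keys are irrelevant. On Java 8+, the same header-writing step could also be written as a forEach on the view (a sketch reusing the names from the method above):

// Equivalent to the header-writing loop above (Java 8+):
outputStreamsMap.values().forEach(out -> out.writeNext(pruneColumns(headerColumns, indexes)));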

From source file:com.uber.hoodie.common.model.HoodieTableMetadata.java

/**
 * Get only the latest file in the partition with precondition commitTime(file) lt maxCommitTime
 *
 * @param fs
 * @param partitionPathStr
 * @param maxCommitTime
 * @return
 */
public FileStatus[] getLatestVersionInPartition(FileSystem fs, String partitionPathStr, String maxCommitTime) {
    try {
        Path partitionPath = new Path(basePath, partitionPathStr);
        if (!fs.exists(partitionPath)) {
            return new FileStatus[0];
        }
        FileStatus[] files = fs.listStatus(partitionPath);
        Map<String, List<FileStatus>> fileIdToVersions = groupFilesByFileId(files, commits.lastCommit());
        HashMap<String, FileStatus> validFiles = new HashMap<>();
        for (String fileId : fileIdToVersions.keySet()) {
            List<FileStatus> versions = fileIdToVersions.get(fileId);
            for (FileStatus file : versions) {
                String filename = file.getPath().getName();
                String commitTime = FSUtils.getCommitTime(filename);
                if (HoodieCommits.isCommit1BeforeOrOn(commitTime, maxCommitTime)) {
                    validFiles.put(fileId, file);
                    break;
                }
            }
        }
        return validFiles.values().toArray(new FileStatus[validFiles.size()]);
    } catch (IOException e) {
        throw new HoodieIOException("Could not get latest versions in Partition " + partitionPathStr, e);
    }
}
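
validFiles.values().toArray(new FileStatus[validFiles.size()]) is the standard way to copy the values view into a typed array; passing a zero-length array works just as well, since toArray allocates an array of the right size when the argument is too small. A small illustrative sketch with plain String values:

HashMap<String, String> byFileId = new HashMap<String, String>();
byFileId.put("f1", "file-1@commit-10");
byFileId.put("f2", "file-2@commit-12");

// Typed copy of the current values; the zero-length array is resized internally.
String[] snapshot = byFileId.values().toArray(new String[0]);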

From source file:com.vmware.thinapp.vi.InventoryBrowser.java

/**
 * Method to filter out TAF VMs (appliances, templates, and capture
 * instances managed by this appliance as well as by others) from the
 * input parameter 'vms'. Returns only VMs that do not belong to
 * any TAF server. This method also updates the tafVMs and othersVMs
 * lists on seeing new VMs.
 *
 * @param vms all VMs to be checked.
 *
 * @return VMs that do not belong to any TAF server.
 */
private VirtualMachine[] filterOutTAFVms(VirtualMachine vms[]) throws RemoteException {
    HashMap<String, VirtualMachine> vmMap = new HashMap<String, VirtualMachine>();

    for (VirtualMachine vm : vms) {
        String moid = vm.getMOR().get_value();
        vmMap.put(moid, vm);
    }

    // Check the tafVMs list.
    for (String moid : tafVMs) {
        if (vmMap.remove(moid) != null) {
            log.trace("VM {} in known TAF VM list is filtered out.", moid);
        }
    }

    // Check the othersVMs list, os type, machine.id etc.
    Iterator<VirtualMachine> itr = vmMap.values().iterator();
    while (itr.hasNext()) {
        VirtualMachine vm = itr.next();
        String moid = vm.getMOR().get_value();

        if (othersVMs.contains(moid)) {
            continue;
        }

        // We haven't seen this VM previously. check its guest id against
        // support guest id list.
        VirtualMachineConfigInfo info = vm.getConfig();
        String guestId = info.getGuestId();
        if (!VIConstants.GUEST_ID_SUPPOTRED.contains(guestId)) {
            tafVMs.add(moid);
            itr.remove();
            log.trace("VM {} of {} guest is filtered out.", moid, guestId);
            continue;
        }

        // It is not a Linux VM, we check its machine.id
        OptionValue[] extraConfig = info.getExtraConfig();
        if (extraConfig == null) {
            // No extra config, it is not a TAF VM. Hence, add it to the
            // othersVMs
            othersVMs.add(moid);
            continue;
        }

        boolean isTAFVm = false;

        for (OptionValue optval : extraConfig) {
            if (optval.getKey().equals(TAF_VM_KEY)) {
                isTAFVm = true;
                break;
            }
        }

        if (isTAFVm) {
            // It is a TAF VM, add to the known TAF VM list and remove from
            // display VM list.
            tafVMs.add(moid);
            itr.remove();
            log.trace("VM {} is managed by a TAF and is filtered out.", vm.getName());
        } else {
            // It is not a TAF VM, add to the others VM list to reduce further
            // VC call.
            othersVMs.add(moid);
            log.trace("Added VM {} to othersVMs.", vm.getName());
        }
    }

    // What left in the vmMap are all non-TAF VMs.
    return vmMap.values().toArray(new VirtualMachine[] {});
}
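
The itr.remove() calls above work because the iterator is obtained from vmMap.values(): the values collection is backed by the map, so removing through its iterator also removes the whole key/value entry. A minimal sketch of that behaviour with illustrative data:

HashMap<String, Integer> counts = new HashMap<String, Integer>();
counts.put("keep", 5);
counts.put("drop", 0);

Iterator<Integer> it = counts.values().iterator();
while (it.hasNext()) {
    if (it.next() == 0) {
        it.remove(); // removes the backing entry ("drop", 0) from counts
    }
}
System.out.println(counts); // {keep=5}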

From source file:org.apache.nutchbase.indexer.IndexingFiltersHbase.java

public IndexingFiltersHbase(Configuration conf) {
    /* Get indexingfilter.order property */
    String order = conf.get(INDEXINGFILTER_ORDER);
    ObjectCache objectCache = ObjectCache.get(conf);
    this.indexingFilters = (IndexingFilterHbase[]) objectCache.getObject(IndexingFilterHbase.class.getName());
    if (this.indexingFilters == null) {
        /*
         * If ordered filters are required, prepare array of filters based on
         * property
         */
        String[] orderedFilters = null;
        if (order != null && !order.trim().equals("")) {
            orderedFilters = order.split("\\s+");
        }
        try {
            ExtensionPoint point = PluginRepository.get(conf).getExtensionPoint(IndexingFilterHbase.X_POINT_ID);
            if (point == null)
                throw new RuntimeException(IndexingFilterHbase.X_POINT_ID + " not found.");
            Extension[] extensions = point.getExtensions();
            HashMap<String, IndexingFilterHbase> filterMap = new HashMap<String, IndexingFilterHbase>();
            for (int i = 0; i < extensions.length; i++) {
                Extension extension = extensions[i];
                IndexingFilterHbase filter = (IndexingFilterHbase) extension.getExtensionInstance();
                LOG.info("Adding " + filter.getClass().getName());
                if (!filterMap.containsKey(filter.getClass().getName())) {
                    filter.addIndexBackendOptions(conf);
                    filterMap.put(filter.getClass().getName(), filter);
                }
            }
            /*
             * If no ordered filters required, just get the filters in an
             * indeterminate order
             */
            if (orderedFilters == null) {
                objectCache.setObject(IndexingFilterHbase.class.getName(),
                        filterMap.values().toArray(new IndexingFilterHbase[0]));
                /* Otherwise run the filters in the required order */
            } else {
                ArrayList<IndexingFilterHbase> filters = new ArrayList<IndexingFilterHbase>();
                for (int i = 0; i < orderedFilters.length; i++) {
                    IndexingFilterHbase filter = filterMap.get(orderedFilters[i]);
                    if (filter != null) {
                        filter.addIndexBackendOptions(conf);
                        filters.add(filter);
                    }
                }
                objectCache.setObject(IndexingFilterHbase.class.getName(),
                        filters.toArray(new IndexingFilterHbase[filters.size()]));
            }
        } catch (PluginRuntimeException e) {
            throw new RuntimeException(e);
        }
        this.indexingFilters = (IndexingFilterHbase[]) objectCache
                .getObject(IndexingFilterHbase.class.getName());
    }
}

From source file:de.tuebingen.uni.sfs.germanet.api.GermaNet.java

/**
 * Returns a <code>List</code> of all <code>LexUnits</code> in the specified
 * <code>wordCategory</code>.
 * @param wordCategory the <code>WordCategory</code>, (e.g.
 * <code>WordCategory.verben</code>)
 * @return a <code>List</code> of all <code>LexUnits</code> in the specified
 * <code>wordCategory</code>. If no <code>LexUnits</code> were found, this
 * is a <code>List</code> containing no <code>LexUnits</code>.
 */
public List<LexUnit> getLexUnits(WordCategory wordCategory) {
    ArrayList<LexUnit> rval = new ArrayList<LexUnit>();
    HashMap<String, ArrayList<LexUnit>> map;
    map = wordCategoryMap.get(wordCategory);

    for (ArrayList<LexUnit> luList : map.values()) {
        rval.addAll((ArrayList<LexUnit>) luList.clone());
    }
    rval.trimToSize();
    return rval;
}
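
Here values() yields the inner ArrayList<LexUnit> lists, which are flattened into one result list (the clone() is shallow, so the copies still reference the same LexUnit objects). On Java 8+ the same flattening can be expressed with streams; a sketch equivalent to the loop above:

// Stream-based flattening of the values (Java 8+); element references are shared either way.
List<LexUnit> rval = map.values().stream()
        .flatMap(List::stream)
        .collect(Collectors.toList());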

From source file:gov.llnl.lc.smt.command.port.SmtPort.java

/**
 * Describe the method here
 *
 * @see     describe related java objects
 *
 * @param subCommandArg
 * @return
 ***********************************************************/
private OSM_Port getOSM_PortByString(String subCommandArg) {
    // return the first match
    HashMap<String, OSM_Port> ports = getOSM_PortsByString(subCommandArg);
    if (ports.isEmpty())
        return null;

    return ports.values().iterator().next();
}
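
ports.values().iterator().next() returns whichever value the map yields first; with a HashMap that order is unspecified, so the idiom is only appropriate when, as the comment says, any match will do. The isEmpty() check above is what prevents a NoSuchElementException; on Java 8+ the same method body could also be written with a stream (a sketch reusing the names above):

// Java 8+ equivalent: Optional handles the empty-map case.
return ports.values().stream().findFirst().orElse(null);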

From source file:edu.ku.brc.af.core.TaskMgr.java

/**
 * @param path
 * @param plugins
 * @param isMobile
 */
private void readRegistry(final HashMap<String, PluginInfo> plugins,
        final HashMap<String, PluginInfo> uiPlugins) {
    try {
        Vector<PluginInfo> list = new Vector<TaskMgr.PluginInfo>(plugins.values());
        Collections.sort(list);

        for (PluginInfo pi : list) {
            String prefName = pi.getPrefName();
            if (prefName != null) {
                if (!AppPreferences.getLocalPrefs().getBoolean(prefName, false)) {
                    continue;
                }
            }

            Object newObj = null;
            try {
                newObj = Class.forName(pi.getClassName()).asSubclass(Taskable.class).newInstance();

            } catch (Exception ex) {
                if (StringUtils.isEmpty(prefName)) {
                    log.error(ex);
                    UIRegistry.showError(String.format(
                            "The plugin '%s' could not be loaded.\nPlease contact cutomer suppoer.",
                            pi.getPluginName()));
                    //ex.printStackTrace();
                    //edu.ku.brc.af.core.UsageTracker.incrHandledUsageCount();
                    //edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(TaskMgr.class, ex);
                }

                // go to the next plugin
                continue;
                // XXX Do we need a dialog here ???
            }

            if (newObj instanceof Taskable) {
                Taskable task = (Taskable) newObj;

                boolean shouldAddToUI = pi.getIsAddToUI();
                if (AppContextMgr.isSecurityOn()) {
                    PermissionIFace perm = task.getPermissions();
                    if (perm != null) {
                        if (!perm.canView()) {
                            shouldAddToUI = false;
                            task.setEnabled(false);
                        }
                    }
                }

                boolean isTaskDefault = pi.getIsDefault();
                if (isTaskDefault) {
                    if (instance.defaultTask == null) {
                        instance.defaultTask = task;
                    } else {
                        log.error("More than one plugin thinks it is the default[" + task.getName() + "]"); //$NON-NLS-1$ //$NON-NLS-2$
                    }
                }

                register(task, shouldAddToUI); //$NON-NLS-1$

            } else {
                log.error("Oops, the plugin is not instance of Taskable [" + newObj + "]"); //$NON-NLS-1$ //$NON-NLS-2$
                // XXX Need to display an error
            }
        }

        for (PluginInfo uiPI : uiPlugins.values()) {
            try {
                Class<?> cls = Class.forName(uiPI.getClassName()).asSubclass(UIPluginable.class);
                //log.debug("Registering ["+name+"] Class["+cls.getName()+"]"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                uiPluginHash.put(uiPI.getPluginName(), cls);

            } catch (Exception ex) {
                log.error(ex);
                ex.printStackTrace();
                edu.ku.brc.af.core.UsageTracker.incrHandledUsageCount();
                edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(TaskMgr.class, ex);
            }
        }

    } catch (Exception ex) {
        edu.ku.brc.af.core.UsageTracker.incrHandledUsageCount();
        edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(TaskMgr.class, ex);
        ex.printStackTrace();
        log.error(ex);
    }
}
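
new Vector<TaskMgr.PluginInfo>(plugins.values()) copies the values into an independent list so that Collections.sort can be applied; the values view itself is not a List and cannot be sorted in place. The same snapshot idiom with the non-legacy ArrayList, as a sketch (PluginInfo must implement Comparable, as it evidently does above):

// Snapshot copy of the values; sorting the copy leaves the map untouched.
List<PluginInfo> list = new ArrayList<PluginInfo>(plugins.values());
Collections.sort(list);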

From source file:net.sf.maltcms.chromaui.project.spi.runnables.CondensePeakAnnotationsRunnable.java

@Override
public void run() {
    try {
        progressHandle.start(3);
        progressHandle.progress("Retrieving Tool Descriptors", 1);
        Collection<? extends IToolDescriptor> selectedTools = Dialogs.showAndSelectDescriptors(
                project.getToolsForPeakContainers(), Lookups.singleton(project), true, IToolDescriptor.class,
                "Condense Peaks", "Check Peak Tool Results to Condense");
        if (!selectedTools.isEmpty()) {
            progressHandle.progress("Retrieving Peak Containers for " + selectedTools.size() + " Tools", 2);
            List<Peak1DContainer> peakContainers = new ArrayList<>();
            for (IChromatogramDescriptor chrom : project.getChromatograms()) {
                for (Peak1DContainer container : project.getPeaks(chrom)) {
                    if (selectedTools.contains(container.getTool())) {
                        peakContainers.add(container);
                    }
                }
            }
            File basedir = project.getImportLocation(this);
            for (Peak1DContainer container : peakContainers) {
                if (isCancel()) {
                    return;
                }
                SummaryStatistics stats = new SummaryStatistics();
                HistogramDataset hd = new HistogramDataset();
                ArrayDouble.D2 pwd = new ArrayDouble.D2(container.getMembers().size(),
                        container.getMembers().size());
                int i = 0, j;
                ArrayList<IPeakAnnotationDescriptor> al = new ArrayList<>(container.getMembers());
                HashMap<PeakFeatureVector, Clique<PeakFeatureVector>> cliques = new LinkedHashMap<>();
                for (IPeakAnnotationDescriptor ipad1 : al) {
                    Clique<PeakFeatureVector> c = new Clique<>(new PeakFeatureVectorComparator(),
                            new PeakCliqueRTDiffMemberCriterion(), new PeakCliqueUpdater());
                    PeakFeatureVector pfv = new PeakFeatureVector(ipad1);
                    c.add(pfv);
                    cliques.put(pfv, c);
                }
                boolean done = false;
                while (!done) {
                    for (Clique<PeakFeatureVector> pfv1 : cliques.values()) {
                        for (Clique<PeakFeatureVector> pfv2 : cliques.values()) {
                            Clique<PeakFeatureVector> jointClique = new Clique<>(
                                    new PeakFeatureVectorComparator(), new PeakCliqueRTDiffMemberCriterion(),
                                    new PeakCliqueUpdater());
                            Set<PeakFeatureVector> vectors = new LinkedHashSet<>();
                        }
                    }
                }
                for (Clique<PeakFeatureVector> pfv1 : cliques.values()) {
                    for (Clique<PeakFeatureVector> pfv2 : cliques.values()) {
                        Clique<PeakFeatureVector> jointClique = new Clique<>(new PeakFeatureVectorComparator(),
                                new PeakCliqueRTDiffMemberCriterion(), new PeakCliqueUpdater());
                        Set<PeakFeatureVector> vectors = new LinkedHashSet<>();
                        for (PeakFeatureVector p1 : pfv1.getFeatureVectorList()) {
                            vectors.add(p1);
                            for (PeakFeatureVector p2 : pfv2.getFeatureVectorList()) {
                                vectors.add(p2);
                                if (pfv1.add(p2)) {
                                    jointClique.add(p2);
                                } else {
                                    vectors.remove(p2);
                                }
                                if (pfv2.add(p1)) {
                                    jointClique.add(p1);
                                } else {
                                    vectors.remove(p1);
                                }
                            }
                        }
                        //jointClique.add
                    }
                }

                System.out.println(stats);
                double snr = stats.getMean() / stats.getStandardDeviation();
                Logger.getLogger(getClass().getName()).log(Level.INFO, "SNR: {0}", snr);
                //                    for (int u = 0; u < pwd.getShape()[0]; u++) {
                //                        for (int v = 0; v < pwd.getShape()[1]; v++) {
                //                        }
                //                    }
                saveHistogramChart(hd, new File(basedir, container.getChromatogram().getDisplayName() + "-"
                        + container.getDisplayName() + "-similarity-histogram.png"));
                BufferedImage bi = ImageTools.makeImage2D(pwd, 256);
                ImageIO.write(bi, "PNG", new File(basedir, container.getChromatogram().getDisplayName() + "-"
                        + container.getDisplayName() + ".png"));
            }
            progressHandle.progress("Calculating pairwise peak RTs", 3);
            project.refresh();

        } else {
            Logger.getLogger(CondensePeakAnnotationsRunnable.class.getName()).log(Level.INFO,
                    "IToolDescriptor selection was empty!");
        }
    } catch (IllegalArgumentException | IOException e) {
        Exceptions.printStackTrace(e);
    } finally {
        progressHandle.finish();
    }
}