Example usage for java.util HashMap entrySet

List of usage examples for java.util HashMap entrySet

Introduction

On this page you can find usage examples for java.util.HashMap entrySet.

Prototype

public Set<Map.Entry<K, V>> entrySet()

Document

Returns a Set view of the mappings contained in this map.

Usage
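
Before the project examples below, here is a minimal, self-contained sketch (class and variable names are illustrative only) of the common entrySet() patterns: iterating over key/value pairs, writing a value back through Map.Entry.setValue, and removing entries through the iterator. The entry set is a live view, so each of these operations affects the map itself.

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

public class EntrySetDemo {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();
        counts.put("apples", 3);
        counts.put("oranges", 0);
        counts.put("pears", 5);

        // Iterate over key/value pairs without a second lookup per key.
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }

        // setValue writes through to the underlying map.
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            entry.setValue(entry.getValue() + 1);
        }

        // Removing via the entry-set iterator also removes the entry from
        // the map and avoids a ConcurrentModificationException.
        Iterator<Map.Entry<String, Integer>> i = counts.entrySet().iterator();
        while (i.hasNext()) {
            if (i.next().getValue() <= 1) {
                i.remove();
            }
        }
        System.out.println(counts); // {pears=6, apples=4}, in unspecified order
    }
}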

From source file:it.polito.tellmefirst.web.rest.clients.ClientEpub.java

private void removeChapterFromString(HashMap lhm, String uselessText) {

    LOG.debug("[removeChapter] - BEGIN");

    Set set = lhm.entrySet();
    Iterator i = set.iterator();

    while (i.hasNext()) {
        Map.Entry me = (Map.Entry) i.next();
        if (me.getValue().toString().contains(uselessText)) {
            i.remove();
        }
    }

    LOG.debug("[removeChapter] - END");

}
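
Note: because the match above depends on the entry's value, removal has to go through a collection view. On Java 8 and later the same effect can be written in one line as lhm.values().removeIf(v -> v.toString().contains(uselessText)), since values() is also a write-through view of the map.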

From source file:com.thoratou.exact.processors.ExactProcessor.java

private void writeSources(HashMap<String, List<PathStep>> mergedMap)
        throws IOException, ClassNotFoundException, ExactException {
    //use all annotation data to generate parsing files
    for (Map.Entry<String, List<PathStep>> entryList : mergedMap.entrySet()) {
        VelocityEngine engine = new VelocityEngine();
        //needed so that variable modifications stay local instead of global
        engine.setProperty(RuntimeConstants.VM_CONTEXT_LOCALSCOPE, true);

        //read the template file from inside the JAR
        InputStream configStream = getClass().getResourceAsStream("/xmlreader.vm");
        BufferedReader configReader = new BufferedReader(new InputStreamReader(configStream, "UTF-8"));

        String className = entryList.getKey();
        List<PathStep> steps = entryList.getValue();

        VelocityContext context = new VelocityContext();
        context.put("class", className);
        //logger.info("class : "+className);

        //ugly temp code
        String[] split = className.split("\\.");
        StringBuilder packageBuffer = new StringBuilder();
        for (int i = 0; i < split.length - 1; i++) {
            packageBuffer.append(split[i]);
            if (i != split.length - 2) {
                packageBuffer.append(".");
            }
        }
        String packageName = packageBuffer.toString();
        //logger.info("package : "+packageName);
        context.put("package", packageName);
        String simpleName = split[split.length - 1];
        //logger.info("simpleclass : "+simpleName);
        context.put("simpleclass", simpleName);

        context.put("steps", steps);
        context.put("kindmap", PathStep.ReverseKindMap);
        context.put("startmap", PathStep.ReverseStartMap);
        context.put("typemap", PathStep.ReverseTypeMap);
        context.put("listtypemap", PathStep.ReverseListTypeMap);
        context.put("indentutil", new IndentUtil());
        context.put("processingutil", new ProcessingUtil());

        Set<String> bomList = new HashSet<String>();
        registerBomListFromSteps(steps, bomList);
        context.put("bomlist", bomList);

        Set<String> extensionList = new HashSet<String>();
        Map<String, ExtensionVelocityData> extensionMap = new HashMap<String, ExtensionVelocityData>();
        registerExtensionListFromSteps(steps, extensionList, extensionMap);
        context.put("extensionlist", extensionList);
        context.put("extensionmap", extensionMap);

        logger.info("input velocity data : " + className + " , " + steps.toString());

        //StringWriter writer = new StringWriter();
        //String packagePath = packageName.replace(".","/");
        //String fullFile = packagePath+"/"+simpleName+"XmlReader.java";
        //logger.info(fullFile);

        Filer filer = processingEnv.getFiler();
        JavaFileObject sourceFile = filer.createSourceFile(className + "XmlReader");
        Writer sourceWriter = sourceFile.openWriter();

        engine.evaluate(context, sourceWriter, "", configReader);

        sourceWriter.close();
        sourceFile.delete();

        //logger.info("final velocity data : "+writer.getBuffer().toString());
    }
}

From source file:it.polito.tellmefirst.web.rest.clients.ClientEpub.java

private void removeChapter(HashMap lhm, String chapterTitle) {

    LOG.debug("[removeChapter] - BEGIN");

    Set set = lhm.entrySet();
    Iterator i = set.iterator();

    while (i.hasNext()) {
        Map.Entry me = (Map.Entry) i.next();
        if (me.getKey().toString().equals(chapterTitle)) {
            LOG.info("Remove useless chapter: " + me.getKey().toString());
            i.remove();
        }
    }

    LOG.debug("[removeChapter] - END");

}
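
Note: this variant matches the key exactly, so if the keys are Strings the loop is equivalent to a direct lhm.remove(chapterTitle); the entry-set iteration only becomes necessary when the match is partial or involves the value.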

From source file:com.jci.job.repo.JobRepoImpl.java

@Override
public synchronized BatchUpdateRes batchUpdate(BatchUpdateReq request) {
    int errorCount = 0;
    int successCount = 0;
    BatchUpdateRes response = new BatchUpdateRes();

    String erpName = request.getErpName();
    HashMap<String, List<PoEntity>> tableNameToEntityMap = request.getTableNameToEntityMap();

    List<String> errorList = new ArrayList<>();
    List<String> successList = new ArrayList<>();

    CloudTable cloudTable = null;
    PoEntity entity = null;
    String tableName = null;

    for (Map.Entry<String, List<PoEntity>> entry : tableNameToEntityMap.entrySet()) {
        try {
            cloudTable = azureStorage.getTable(entry.getKey());
            tableName = entry.getKey();
        } catch (Exception e) {
            LOG.error("### Exception in JobRepoImpl.batchUpdate.getTable ###" + e);
            response.setError(true);
            response.setMessage("The Application has encountered an error! Table  does not exist !");
            throw errorService.createException(JobException.class, e, ErrorEnum.ERROR_TABLE_NOT_FOUND,
                    entry.getKey());
        }

        // Define a batch operation.
        TableBatchOperation batchOperation = new TableBatchOperation();
        List<PoEntity> value = entry.getValue();

        for (int i = 0; i < value.size(); i++) {
            entity = value.get(i);
            //counter= counter+1;

            entity.setGlobalId(request.getGlobalId());
            entity.setUserName(request.getUserName());
            entity.setComment(request.getComment());

            if (request.isSuccess()) {// Updating the (success) status for POs that were successfully processed to e2open
                entity.setSupplierDeliveryState(Constants.STATUS_SUCCESS);
                successCount = successCount + 1;
                successList.add(entity.getRowKey());
            } else {// Request is for an error-status update
                entity.setSupplierDeliveryState(Constants.STATUS_ERROR);
                errorCount = errorCount + 1;
                errorList.add(entity.getRowKey());
            }

            batchOperation.insertOrMerge(entity);
            if (i != 0 && (i % batchSize) == 0) {
                try {
                    cloudTable.execute(batchOperation);
                    batchOperation.clear();
                } catch (Exception e) {
                    response.setError(true);
                    response.setMessage("The Application has encountered an error!");
                    if (request.isSuccess()) {
                        successCount = successCount - 1;
                    } else {
                        errorCount = errorCount - 1;
                    }
                    LOG.error("### Exception in JobRepoImpl.batchUpdate.execute ###" + e);

                    continue;
                }
            }
        }

        if (batchOperation.size() > 0) {
            try {
                cloudTable.execute(batchOperation);
            } catch (Exception e) {
                response.setError(true);
                response.setMessage("The Application has encountered an error!");
                if (request.isSuccess()) {
                    successCount = successCount - 1;
                } else {
                    errorCount = errorCount - 1;
                }
                LOG.error("### Exception in JobRepoImpl.batchUpdate.execute ###" + e);
                continue;
            }
        }
    }
    response.setErrorList(errorList);
    response.setSuccessList(successList);
    updateMiscEntity(erpName, tableName, successCount, errorCount);
    return response;

}
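
Here entrySet() pairs each table name (the key) with its list of entities (the value), so the CloudTable handle is looked up once per table rather than once per entity.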

From source file:de.tudarmstadt.tk.statistics.importer.ExternalResultsReader.java

public static void readMUGCCV(String filePath) {
    String outFileName = "AggregatedTrainTest.csv";

    logger.log(Level.INFO, String.format("Importing data from directory %s.", filePath));

    // The method expects a single input file, not a directory.
    File directory = new File(filePath);
    if (directory.isDirectory()) {
        System.err.println("Please specify a file. Aborting.");
        return;
    }

    //Delete the previous output file, if there was one
    File outputFile = new File(directory.getParentFile(), outFileName);
    if (outputFile.exists()) {
        outputFile.delete();
    }
    try {
        String header = "Train;Test;Classifier;FeatureSet;Measure;Value";

        PrintWriter out = new PrintWriter(new FileWriter(outputFile, true));
        out.println(header);
        out.close();
    } catch (IOException e) {
        System.err.println("Error while writing aggregated Train-Test file.");
        e.printStackTrace();
    }

    ArrayList<String> outputRows = new ArrayList<String>();

    // iterate all rows
    List<String[]> inputRowsFirstFile = readAndCheckCSV(filePath, ';');

    // parse each row into an ExternalResults record
    ArrayList<ExternalResults> extResults = new ArrayList<>();

    for (int i = 0; i < inputRowsFirstFile.size(); i++) {
        ExternalResults results = new ExternalResults();

        // identify current train/test split
        String[] datasetNames = inputRowsFirstFile.get(i)[0].split(",");
        results.trainSetName = datasetNames[0].replace("CV: ", "").replace(" ", "");

        // set classifier name
        results.classifierParameters = inputRowsFirstFile.get(i)[1];

        // read feature set
        results.featureSetName = inputRowsFirstFile.get(i)[2];

        // read classification results
        results.recall = Double.parseDouble(inputRowsFirstFile.get(i)[3]);
        results.fMeasure = Double.parseDouble(inputRowsFirstFile.get(i)[4]);
        results.precision = Double.parseDouble(inputRowsFirstFile.get(i)[5]);
        results.accuracy = Double.parseDouble(inputRowsFirstFile.get(i)[10]) / 100;

        extResults.add(results);
    }

    HashMap<String, ArrayList<ExternalResults>> extResultsByTrainTestFeature = new HashMap<>();

    // group results by train/test/feature-set key
    for (ExternalResults result : extResults) {
        String idKey = result.trainSetName + result.testSetName + result.featureSetName;

        if (extResultsByTrainTestFeature.containsKey(idKey)) {
            extResultsByTrainTestFeature.get(idKey).add(result);
        } else {
            extResultsByTrainTestFeature.put(idKey, new ArrayList<ExternalResults>());
            extResultsByTrainTestFeature.get(idKey).add(result);
        }
    }

    ArrayList<ExternalResults> aggregatedResults = new ArrayList<>();

    // aggregate results, or keep single entries as they are
    for (Entry<String, ArrayList<ExternalResults>> trainTestSplit : extResultsByTrainTestFeature.entrySet()) {
        ExternalResults aggrResult = new ExternalResults();

        double recall = 0;
        double fMeasure = 0;
        double precision = 0;
        double accuracy = 0;
        int nrClassifiers = 0;

        // for all entries that are from the same train/test split and use the same feature set -> aggregate results
        for (ExternalResults result : trainTestSplit.getValue()) {
            aggrResult.testSetName = result.testSetName;
            aggrResult.trainSetName = result.trainSetName;
            aggrResult.classifierParameters = result.classifierParameters;
            aggrResult.featureSetName = result.featureSetName;

            recall += result.recall;
            fMeasure += result.fMeasure;
            precision += result.precision;
            accuracy += result.accuracy;
            nrClassifiers++;
        }

        aggrResult.accuracy = (accuracy / nrClassifiers);
        aggrResult.fMeasure = (fMeasure / nrClassifiers);
        aggrResult.recall = (recall / nrClassifiers);
        aggrResult.precision = (precision / nrClassifiers);

        aggregatedResults.add(aggrResult);
    }

    // write one output row per measure
    for (ExternalResults result : aggregatedResults) {
        String outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0",
                result.featureSetName, "Percent Correct", result.accuracy);
        outputRows.add(outputRow);

        outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0",
                result.featureSetName, "Weighted Precision", result.precision);
        outputRows.add(outputRow);

        outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0",
                result.featureSetName, "Weighted Recall", result.recall);
        outputRows.add(outputRow);

        outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0",
                result.featureSetName, "Weighted F-Measure", result.fMeasure);
        outputRows.add(outputRow);

    }

    // Write aggregated data to a new file
    try {
        PrintWriter out = new PrintWriter(new FileWriter(outputFile, true));
        for (String s : outputRows) {
            out.println(s);
        }
        out.close();
    } catch (IOException e) {
        System.err.println("Error while writing aggregated Train-Test file.");
        e.printStackTrace();
    }

    logger.log(Level.INFO,
            String.format("Finished import. The aggregated data was written to %s.", outFileName));
}

From source file:cr.ac.siua.tec.services.impl.RTServiceImpl.java

/**
 * Builds a string with the structure needed to post a new ticket to RT REST API.
 */
public String getTicketParamsString(HashMap<String, String> formValues) {
    formValues.remove("g-recaptcha-response");
    StringBuilder sb = new StringBuilder();

    //Manually appends the queue, subject and requestor fields.
    sb.append("Queue: " + formValues.get("Queue") + "\n");
    sb.append("Subject: Solicitado por " + formValues.get("RequestorName") + "\n");
    sb.append("Requestor: " + formValues.get("Requestor") + "\n");
    formValues.remove("Queue");
    formValues.remove("Requestor");
    formValues.remove("RequestorName");

    //Iterates through all custom fields.
    String fieldName, fieldValue;
    for (Map.Entry<String, String> entry : formValues.entrySet()) {
        fieldName = entry.getKey();
        fieldValue = entry.getValue();
        String[] lines = fieldValue.split("\n");
        if (lines.length > 1) {
            fieldValue = "";
            for (String line : lines)
                fieldValue += " " + line + "\n";
        }
        sb.append("CF-" + fieldName + ": " + fieldValue + "\n");
    }
    return sb.toString();
}
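
Since HashMap iteration order is unspecified, the CF- lines are emitted in no particular order; if the ticket needed to preserve the form's field order, a LinkedHashMap would be the natural substitute.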

From source file:de.tudarmstadt.tk.statistics.importer.ExternalResultsReader.java

public static void readMUGCTrainTest(String filePath) {
    String outFileName = "AggregatedTrainTest.csv";

    logger.log(Level.INFO, String.format("Importing data from directory %s.", filePath));

    // The method expects a single input file, not a directory.
    File directory = new File(filePath);
    if (directory.isDirectory()) {
        System.err.println("Please specify a file. Aborting.");
        return;
    }

    //Delete the previous output file, if there was one
    File outputFile = new File(directory.getParentFile(), outFileName);
    if (outputFile.exists()) {
        outputFile.delete();
    }
    try {
        String header = "Train;Test;Classifier;FeatureSet;Measure;Value";

        PrintWriter out = new PrintWriter(new FileWriter(outputFile, true));
        out.println(header);
        out.close();
    } catch (IOException e) {
        System.err.println("Error while writing aggregated Train-Test file.");
        e.printStackTrace();
    }

    ArrayList<String> outputRows = new ArrayList<String>();

    // iterate all rows
    List<String[]> inputRowsFirstFile = readAndCheckCSV(filePath, ';');

    // parse each row into an ExternalResults record
    ArrayList<ExternalResults> extResults = new ArrayList<>();

    for (int i = 0; i < inputRowsFirstFile.size(); i++) {
        ExternalResults results = new ExternalResults();

        // identify current train/test split
        String[] datasetNames = inputRowsFirstFile.get(i)[0].replace("TRAIN:", "").replace("TEST:", "")
                .split(",");
        results.trainSetName = datasetNames[0].replace(" ", "");
        results.testSetName = datasetNames[1].replace(" ", "");

        // set classifier name
        results.classifierParameters = inputRowsFirstFile.get(i)[1];

        // read feature set
        results.featureSetName = inputRowsFirstFile.get(i)[2];

        // read classification results
        results.recall = Double.parseDouble(inputRowsFirstFile.get(i)[3]);
        results.fMeasure = Double.parseDouble(inputRowsFirstFile.get(i)[4]);
        results.precision = Double.parseDouble(inputRowsFirstFile.get(i)[5]);
        results.accuracy = Double.parseDouble(inputRowsFirstFile.get(i)[10]) / 100;

        extResults.add(results);
    }

    HashMap<String, ArrayList<ExternalResults>> extResultsByTrainTestFeature = new HashMap<>();

    // group results by train/test/feature-set key
    for (ExternalResults result : extResults) {
        String idKey = result.trainSetName + result.testSetName + result.featureSetName;

        if (extResultsByTrainTestFeature.containsKey(idKey)) {
            extResultsByTrainTestFeature.get(idKey).add(result);
        } else {
            extResultsByTrainTestFeature.put(idKey, new ArrayList<ExternalResults>());
            extResultsByTrainTestFeature.get(idKey).add(result);
        }
    }

    ArrayList<ExternalResults> aggregatedResults = new ArrayList<>();

    // aggregate results, or keep single entries as they are
    for (Entry<String, ArrayList<ExternalResults>> trainTestSplit : extResultsByTrainTestFeature.entrySet()) {
        ExternalResults aggrResult = new ExternalResults();

        double recall = 0;
        double fMeasure = 0;
        double precision = 0;
        double accuracy = 0;
        int nrClassifiers = 0;

        // for all entries that are from the same train/test split and use the same feature set -> aggregate results
        for (ExternalResults result : trainTestSplit.getValue()) {
            aggrResult.testSetName = result.testSetName;
            aggrResult.trainSetName = result.trainSetName;
            aggrResult.classifierParameters = result.classifierParameters;
            aggrResult.featureSetName = result.featureSetName;

            recall += result.recall;
            fMeasure += result.fMeasure;
            precision += result.precision;
            accuracy += result.accuracy;
            nrClassifiers++;
        }

        aggrResult.accuracy = (accuracy / nrClassifiers);
        aggrResult.fMeasure = (fMeasure / nrClassifiers);
        aggrResult.recall = (recall / nrClassifiers);
        aggrResult.precision = (precision / nrClassifiers);

        aggregatedResults.add(aggrResult);
    }

    // write one output row per measure
    for (ExternalResults result : aggregatedResults) {
        String outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0",
                result.featureSetName, "Percent Correct", result.accuracy);
        outputRows.add(outputRow);

        outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0",
                result.featureSetName, "Weighted Precision", result.precision);
        outputRows.add(outputRow);

        outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0",
                result.featureSetName, "Weighted Recall", result.recall);
        outputRows.add(outputRow);

        outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0",
                result.featureSetName, "Weighted F-Measure", result.fMeasure);
        outputRows.add(outputRow);

    }

    // Write aggregated data to a new file
    try {
        PrintWriter out = new PrintWriter(new FileWriter(outputFile, true));
        for (String s : outputRows) {
            out.println(s);
        }
        out.close();
    } catch (IOException e) {
        System.err.println("Error while writing aggregated Train-Test file.");
        e.printStackTrace();
    }

    logger.log(Level.INFO,
            String.format("Finished import. The aggregated data was written to %s.", outFileName));
}

From source file:com.chargebee.CSV.PhoneBook.PhoneBook2.PhoneBook.java

private void display(HashMap<String, ArrayList> map) {
    Scanner sc = new Scanner(System.in);

    System.out.println("Search by : name/number?");
    String choice = sc.nextLine();
    System.out.println("Enter the " + choice + " : ");
    String param = sc.nextLine();

    if (choice.equalsIgnoreCase("name")) {
        for (String key : map.keySet()) {
            if (key.toLowerCase().contains(param.toLowerCase())) {
                ArrayList<Person> tempo = map.get(key);
                for (Person p : tempo) {
                    p.print();
                }
            }
        }
    } else if (choice.equalsIgnoreCase("number")) {
        boolean found = false;
        for (Map.Entry<String, ArrayList> entry : map.entrySet()) {
            ArrayList<Person> tempo = entry.getValue();
            for (Person p : tempo) {
                if (p.getPhone().getHomeNumber().equalsIgnoreCase(param)
                        || p.getPhone().getMobileNumber().equalsIgnoreCase(param)
                        || p.getPhone().getWorkNumber().equalsIgnoreCase(param)) {
                    p.print();
                    found = true;
                    break;
                }
            }
        }
        if (!found) {
            System.out.println("Phone Number not found!!");
        }
    } else {
        System.out.println("Invalid Choice!!");
    }
}

From source file:iddb.core.IDDBService.java

public void saveServerBanPermissions(Server server, HashMap<Long, Long> perm) throws ApplicationError {
    if (server.getBanPermissions() != null)
        server.getBanPermissions().clear();
    for (Entry<Long, Long> p : perm.entrySet()) {
        server.setBanPermission(p.getKey(), p.getValue());
    }
    try {
        serverDAO.saveBanPermissions(server);
    } catch (DAOException e) {
        throw new ApplicationError(e);
    }
}

From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java

public void testEntrySetEntrySetterNonString() {
    HashMap hashMap = new HashMap();
    Integer key = 1;
    hashMap.put(key, 2);
    Set entrySet = hashMap.entrySet();
    Entry entry = (Entry) entrySet.iterator().next();

    entry.setValue(3);
    assertEquals(3, hashMap.get(key));

    hashMap.put(key, 4);
    assertEquals(4, entry.getValue());

    assertEquals(1, hashMap.size());
}