Example usage for java.util HashMap entrySet

List of usage examples for java.util HashMap entrySet

Introduction

In this page you can find the example usage for java.util HashMap entrySet.

Prototype

Set entrySet

To view the source code for java.util HashMap entrySet, use the link below.

Click Source Link

Document

Holds cached entrySet().

Usage

From source file:cr.ac.siua.tec.utils.impl.ConstancyPDFGenerator.java

/**
 * Fills the PDF form template (constancia.pdf) with the given ticket values and
 * returns the resulting document as a base64-encoded string.
 *
 * @param formValues field-name-to-value map; the "Queue", "Motivo" and
 *                   "Requestors" entries are ticket metadata and are dropped,
 *                   every remaining key must match an AcroForm field name.
 * @return the base64-encoded PDF, or null if loading or filling the form failed.
 */
@Override
public String generate(HashMap<String, String> formValues) {
    String originalPdf = PDFGenerator.RESOURCES_PATH + "constancia.pdf";
    try {
        PDDocument _pdfDocument = PDDocument.load(originalPdf);
        PDDocumentCatalog docCatalog = _pdfDocument.getDocumentCatalog();
        PDAcroForm acroForm = docCatalog.getAcroForm();

        // Set the date field manually: "<day> de <month> del año <year>.".
        Calendar cal = Calendar.getInstance();
        int day = cal.get(Calendar.DAY_OF_MONTH);
        int year = cal.get(Calendar.YEAR);
        int month = cal.get(Calendar.MONTH);
        // Fixed mojibake in the original literal: "del ao" -> "del año".
        String date = String.valueOf(day) + " de " + monthsMap.get(month) + " del año " + String.valueOf(year)
                + ".";
        acroForm.getField("Fecha").setValue(date);

        // These keys are ticket metadata, not PDF form fields.
        formValues.remove("Queue");
        formValues.remove("Motivo");
        formValues.remove("Requestors");

        // Iterates through remaining custom fields.
        for (Map.Entry<String, String> entry : formValues.entrySet()) {
            acroForm.getField(entry.getKey()).setValue(entry.getValue());
        }
        return encodePDF(_pdfDocument);
    } catch (IOException e) {
        e.printStackTrace();
        // Fixed mojibake in the original literal: "Excepcin" -> "Excepción".
        System.out.println("Excepción al llenar el PDF.");
        return null;
    }
}

From source file:com.perceptive.epm.perkolcentral.bl.LicensesBL.java

/**
 * Builds the FusionCharts XML document summarizing, per license type, the
 * number of licenses purchased versus used up.
 *
 * @return the chart XML, or an empty string if no license data was gathered.
 * @throws ExceptionWrapper wrapping any failure while collecting the data.
 */
public String getLicenseSummaryChartData() throws ExceptionWrapper {
    String chartData = "";
    try {
        HashMap<String, ArrayList<String>> licenseInfoKeyedByLicenseName = getLicenseRelatedInfo();
        // Build the category list and both datasets in a single pass over the
        // map. Iterating the same unmodified HashMap yields a consistent order,
        // so categories and values stay aligned (the original iterated keySet()
        // and entrySet() separately and concatenated Strings inside loops).
        StringBuilder categories = new StringBuilder();
        StringBuilder licensePurchased = new StringBuilder();
        StringBuilder licenseUsedUp = new StringBuilder();
        for (Map.Entry<String, ArrayList<String>> entry : licenseInfoKeyedByLicenseName.entrySet()) {
            String licenseTypeName = entry.getKey();
            // values.get(0) = total purchased, values.get(1) = total used up.
            ArrayList<String> values = entry.getValue();
            categories.append("<category name='").append(StringUtils.abbreviate(licenseTypeName, 15))
                    .append("' hoverText='").append(licenseTypeName).append("'/>");
            licensePurchased.append("<set value='").append(values.get(0)).append("'/>");
            licenseUsedUp.append("<set value='").append(values.get(1))
                    .append("' link='JavaScript:populateTheDetails(%26apos;").append(licenseTypeName)
                    .append("%26apos;)'/>");
        }
        chartData = "<graph xaxisname='License Type' yaxisname='Number Of Licenses' hovercapbg='DEDEBE' hovercapborder='889E6D' rotateNames='0' yAxisMaxValue='100' numdivlines='9' divLineColor='CCCCCC' divLineAlpha='80' decimalPrecision='0' showAlternateHGridColor='1' AlternateHGridAlpha='30' AlternateHGridColor='CCCCCC' caption='Perceptive Software Kolkata' subcaption='License purchase/distribution summary'>"
                + "<categories font='Arial' fontSize='11' fontColor='000000'>" + categories + "</categories>"
                + "<dataset seriesname='Total License Purchased' color='FDC12E'>" + licensePurchased
                + "</dataset>" + "<dataset seriesname='Total License Used Up' color='56B9F9'>" + licenseUsedUp
                + "</dataset>" + "</graph>";

    } catch (Exception ex) {
        throw new ExceptionWrapper(ex);
    }
    return chartData;
}

From source file:net.geant.edugain.filter.EduGAINFilter.java

/** Generates the information for the Lcook.
 * @param attrs user attributes to serialize into the cookie payload
 * @param outdated if true, create an outdated (timestamp 0) cookie, as in a LOGOUT handling
 * @return raw Lcook data: "timestamp:location:serviceID:key1=value1,key2=value2"
 */
private String generateCookie(HashMap<String, String> attrs, boolean outdated) {
    // Outdated cookies carry timestamp 0 so they are rejected immediately.
    long timeStamp = outdated ? 0 : System.currentTimeMillis() / 1000;
    StringBuilder rawCook = new StringBuilder();
    rawCook.append(timeStamp).append(':');
    rawCook.append(this.location).append(':');
    rawCook.append(this.serviceID).append(':');
    // Join the attributes as "k=v" pairs separated by commas. Building the
    // separator up front (instead of stripping a trailing comma with
    // substring) avoids the StringIndexOutOfBoundsException the original
    // threw on an empty attribute map.
    StringBuilder userData = new StringBuilder();
    for (Map.Entry<String, String> entry : attrs.entrySet()) {
        if (userData.length() > 0) {
            userData.append(',');
        }
        userData.append(entry.getKey()).append('=').append(entry.getValue());
    }
    rawCook.append(userData);
    return rawCook.toString();
}

From source file:de.uni_potsdam.hpi.bpt.bp2014.jcore.rest.RestInterface.java

/**
 * Returns a JSON-Object, which contains information about all
 * data objects of a specified scenario instance.
 * The data contains the id, label and state.
 *
 * @param scenarioID   The ID of the scenario model.
 * @param instanceID   The ID of the scenario instance.
 * @param filterString A String which specifies a filter. Only Data
 *                     Objects with a label containing this string
 *                     will be returned.
 * @return A Response with the outcome of the GET-Request. The Response
 * will be a 200 (OK) if the specified instance was found. Hence
 * the JSON-Object will be returned./*from   w w  w  .  j a  v  a  2s .c o  m*/
 * It will be a 301 (REDIRECT) if the scenarioID is wrong.
 * And a 404 if the instance id is wrong.
 */
@GET
@Path("scenario/{scenarioID}/instance/{instanceID}/dataobject")
@Produces(MediaType.APPLICATION_JSON)
public Response getDataObjects(@Context UriInfo uriInfo, @PathParam("scenarioID") int scenarioID,
        @PathParam("instanceID") int instanceID, @QueryParam("filter") String filterString) {
    ExecutionService executionService = new ExecutionService();
    //TODO: add link to detail REST call for more information about each dataobject
    if (!executionService.existScenarioInstance(instanceID)) {
        return Response.status(Response.Status.NOT_FOUND).type(MediaType.APPLICATION_JSON)
                .entity("{\"error\":\"There is no instance with the id " + instanceID + "\"}").build();
    } else if (!executionService.existScenario(scenarioID)) {
        try {
            return Response.seeOther(new URI(
                    "interface/v2/scenario/" + executionService.getScenarioIDForScenarioInstance(instanceID)
                            + "/instance/" + instanceID + "/dataobject"))
                    .build();
        } catch (URISyntaxException e) {
            return Response.serverError().build();
        }
    }

    executionService.openExistingScenarioInstance(scenarioID, instanceID);
    LinkedList<Integer> dataObjects = executionService.getAllDataObjectIDs(instanceID);
    HashMap<Integer, String> states = executionService.getAllDataObjectStates(instanceID);
    HashMap<Integer, String> labels = executionService.getAllDataObjectNames(instanceID);
    if (filterString != null && !filterString.isEmpty()) {
        for (Map.Entry<Integer, String> labelEntry : labels.entrySet()) {
            if (!labelEntry.getValue().contains(filterString)) {
                dataObjects.remove(labelEntry.getKey());
                states.remove(labelEntry.getKey());
                labels.remove(labelEntry.getKey());
            }
        }
    }
    JSONObject result = buildListForDataObjects(uriInfo, dataObjects, states, labels);
    return Response.ok(result.toString(), MediaType.APPLICATION_JSON).build();
}

From source file:it.unibas.spicy.persistence.csv.DAOCsv.java

@SuppressWarnings("unchecked")
public void loadInstanceSample(IDataSourceProxy dataSource, HashMap<String, ArrayList<Object>> strfullPath,
        String catalog) throws DAOException {
    // Builds a sample instance tree (one SetNode per CSV table) rooted at the
    // catalog node and attaches it to the data source, registering the
    // instance-file info as an annotation.
    INode root = null;
    try {
        HashMap<String, ArrayList<Object>> instanceInfoList = (HashMap<String, ArrayList<Object>>) dataSource
                .getAnnotation(SpicyEngineConstants.CSV_INSTANCES_INFO_LIST);
        root = new TupleNode(getNode(catalog).getLabel(), getOID());
        root.setRoot(true);
        // Register the instance info once up front instead of re-putting the
        // whole map on every loop iteration as the original code did.
        if (!strfullPath.isEmpty()) {
            if (instanceInfoList == null) {
                instanceInfoList = new HashMap<String, ArrayList<Object>>();
                dataSource.addAnnotation(SpicyEngineConstants.CSV_INSTANCES_INFO_LIST, instanceInfoList);
            }
            instanceInfoList.putAll(strfullPath);
        }
        for (Map.Entry<String, ArrayList<Object>> entry : strfullPath.entrySet()) {
            String filePath = entry.getKey();
            //the list entry.getValue() contains a)the table name 
            //b)a boolean that contains the info if the instance file includes column names             
            String tableName = (String) entry.getValue().get(0);
            boolean colNames = (Boolean) entry.getValue().get(1);

            SetNode setTable = new SetNode(getNode(tableName).getLabel(), getOID());
            if (logger.isDebugEnabled())
                logger.debug("extracting value for table " + tableName + " ....");

            getInstanceByTable(tableName, setTable, filePath, colNames);
            root.addChild(setTable);
        }
        dataSource.addInstanceWithCheck(root);
    } catch (Throwable ex) {
        logger.error(ex);
        throw new DAOException(ex.getMessage());
    }
}

From source file:io.pravega.segmentstore.server.reading.StorageReadManagerTests.java

/**
 * Tests the execute method with valid Requests:
 * * All StreamSegments exist and have enough data.
 * * All read offsets are valid (but we may choose to read more than the length of the Segment).
 * * ReadRequests may overlap.
 */
@Test
public void testValidRequests() throws Exception {
    final int defaultReadLength = MIN_SEGMENT_LENGTH - 1;
    final int offsetIncrement = defaultReadLength / 3;

    @Cleanup
    Storage storage = InMemoryStorageFactory.newStorage(executorService());
    storage.initialize(1);
    byte[] segmentData = populateSegment(storage);
    @Cleanup
    StorageReadManager reader = new StorageReadManager(SEGMENT_METADATA, storage, executorService());

    // Issue overlapping read requests that together cover the whole segment.
    HashMap<StorageReadManager.Request, CompletableFuture<StorageReadManager.Result>> pendingReads = new HashMap<>();
    for (int offset = 0; offset < segmentData.length; offset += offsetIncrement) {
        int length = Math.min(defaultReadLength, segmentData.length - offset);
        CompletableFuture<StorageReadManager.Result> completion = new CompletableFuture<>();
        StorageReadManager.Request request = new StorageReadManager.Request(offset, length,
                completion::complete, completion::completeExceptionally, TIMEOUT);
        reader.execute(request);
        pendingReads.put(request, completion);
    }

    // Check that every read request returned exactly the expected bytes.
    for (StorageReadManager.Request request : pendingReads.keySet()) {
        StorageReadManager.Result readData = pendingReads.get(request).join();
        int expectedReadLength = Math.min(request.getLength(),
                (int) (segmentData.length - request.getOffset()));

        Assert.assertNotNull("No data returned for request " + request, readData);
        Assert.assertEquals("Unexpected read length for request " + request, expectedReadLength,
                readData.getData().getLength());
        AssertExtensions.assertStreamEquals("Unexpected read contents for request " + request,
                new ByteArrayInputStream(segmentData, (int) request.getOffset(), expectedReadLength),
                readData.getData().getReader(), expectedReadLength);
    }
}

From source file:de.tor.tribes.util.report.ReportManager.java

/**
 * Applies the report filter rules to every report in the given group and moves
 * matching reports into the target group of the first rule that accepts them.
 *
 * @param pGroup the group whose reports are filtered
 * @return the number of reports that were moved to another group
 */
public int filterNow(String pGroup) {
    invalidate();
    try {
        // First pass: decide, per report, which group (if any) it moves to.
        HashMap<FightReport, String> reportsToMove = new HashMap<>();
        for (ManageableType element : getAllElements(pGroup)) {
            FightReport report = (FightReport) element;
            for (ReportRule rule : getRules()) {
                if (!rule.isValid(report)) {
                    continue;
                }
                // Only move the report if the first matching rule points to a
                // different group; otherwise it stays where it is.
                if (!rule.getTargetSet().equals(pGroup)) {
                    reportsToMove.put(report, rule.getTargetSet());
                }
                break;
            }
        }

        // Second pass: perform the moves.
        for (Entry<FightReport, String> move : reportsToMove.entrySet()) {
            // remove report from this group
            removeElement(pGroup, move.getKey());
            // add report to new group and continue filtering
            addManagedElement(move.getValue(), move.getKey(), true);
        }
        return reportsToMove.size();
    } finally {
        revalidate(true);
    }
}

From source file:org.openscience.cdk.applications.taverna.weka.classification.EvaluateClassificationResultsAsPDFActivity.java

/**
 * Classifies every instance of the given dataset and records, per class value,
 * the percentage of correctly predicted instances in the chart dataset, plus
 * an "Overall" value averaging those per-class percentages.
 *
 * @param dataset       the UUID-tagged instances to classify
 * @param classifier    the trained classifier under evaluation
 * @param chartDataset  receives one percentage per class plus the overall value
 * @param setPercentage receives the overall percentage for this set
 * @param setname       row key used for all values added to the chart dataset
 * @throws Exception if filtering or classification fails
 */
private void createDataset(Instances dataset, Classifier classifier, DefaultCategoryDataset chartDataset,
        LinkedList<Double> setPercentage, String setname) throws Exception {
    WekaTools tools = new WekaTools();
    HashMap<UUID, Double> orgClassMap = new HashMap<UUID, Double>();
    HashMap<UUID, Double> calcClassMap = new HashMap<UUID, Double>();
    // Split the instance UUIDs off from the attributes used for classification.
    Instances trainUUIDSet = Filter.useFilter(dataset, tools.getIDGetter(dataset));
    dataset = Filter.useFilter(dataset, tools.getIDRemover(dataset));
    for (int k = 0; k < dataset.numInstances(); k++) {
        double pred = classifier.classifyInstance(dataset.instance(k));
        UUID uuid = UUID.fromString(trainUUIDSet.instance(k).stringValue(0));
        calcClassMap.put(uuid, pred);
        orgClassMap.put(uuid, dataset.instance(k).classValue());
    }
    // Count, per original class value, total occurrences and correct
    // predictions. Map.merge replaces the original null-check-then-put idiom.
    HashMap<Double, Integer> correctPred = new HashMap<Double, Integer>();
    HashMap<Double, Integer> occurances = new HashMap<Double, Integer>();
    for (int k = 0; k < dataset.numInstances(); k++) {
        UUID uuid = UUID.fromString(trainUUIDSet.instance(k).stringValue(0));
        double pred = calcClassMap.get(uuid);
        double org = orgClassMap.get(uuid);
        occurances.merge(org, 1, Integer::sum);
        if (pred == org) {
            correctPred.merge(org, 1, Integer::sum);
        }
    }
    double overall = 0;
    for (Entry<Double, Integer> entry : occurances.entrySet()) {
        Double key = entry.getKey();
        int occ = entry.getValue();
        Integer pred = correctPred.get(key);
        int pre = pred == null ? 0 : pred;
        // Percentage of correctly classified instances for this class.
        double ratio = pre / (double) occ * 100;
        overall += ratio;
        chartDataset.addValue(ratio, setname, dataset.classAttribute().value(key.intValue()));
    }
    overall /= occurances.size();
    setPercentage.add(overall);
    chartDataset.addValue(overall, setname, "Overall");
}

From source file:com.google.gerrit.server.project.RefControl.java

/**
 * Determines whether the given access section's ref pattern applies to the
 * current ref, expanding the username template first when present.
 */
private boolean appliesToRef(AccessSection section) {
    String refPattern = section.getRefPattern();

    if (isTemplate(refPattern)) {
        // Right now we only template the username. If not available
        // this rule cannot be matched at all.
        if (!(getCurrentUser() instanceof IdentifiedUser)) {
            return false;
        }
        HashMap<String, String> params = new HashMap<String, String>();
        params.put("username", ((IdentifiedUser) getCurrentUser()).getUserName());

        if (isRE(refPattern)) {
            // Substituted values must be escaped before entering a regex.
            for (Map.Entry<String, String> param : params.entrySet()) {
                param.setValue(escape(param.getValue()));
            }
        }

        ParamertizedString template = new ParamertizedString(refPattern);
        refPattern = template.replace(params);
    }

    if (isRE(refPattern)) {
        // Full regular-expression match against the ref name.
        return Pattern.matches(refPattern, getRefName());
    }
    if (refPattern.endsWith("/*")) {
        // Trailing "/*" means prefix match (keep the trailing slash).
        String prefix = refPattern.substring(0, refPattern.length() - 1);
        return getRefName().startsWith(prefix);
    }
    // Otherwise it must be an exact ref name.
    return getRefName().equals(refPattern);
}

From source file:com.linkedin.cubert.pig.piggybank.storage.avro.AvroStorage.java

/**
 * Set input location and obtain input schema.
 */
@SuppressWarnings("unchecked")
@Override
public void setLocation(String location, Job job) throws IOException {
    // Schema already resolved; nothing more to do.
    if (inputAvroSchema != null) {
        return;
    }

    if (!UDFContext.getUDFContext().isFrontend()) {
        Properties props = getUDFProperties();
        // Restore the merged-schema mapping the frontend serialized, if any.
        String mergedSchema = props.getProperty(AVRO_MERGED_SCHEMA_PROPERTY);
        if (mergedSchema != null) {
            HashMap<URI, Map<Integer, Integer>> mergedSchemaMap = (HashMap<URI, Map<Integer, Integer>>) ObjectSerializer
                    .deserialize(mergedSchema);
            schemaToMergedSchemaMap = new HashMap<Path, Map<Integer, Integer>>();
            for (Entry<URI, Map<Integer, Integer>> mapping : mergedSchemaMap.entrySet()) {
                schemaToMergedSchemaMap.put(new Path(mapping.getKey()), mapping.getValue());
            }
        }
        String schema = props.getProperty(AVRO_INPUT_SCHEMA_PROPERTY);
        if (schema != null) {
            try {
                inputAvroSchema = new Schema.Parser().parse(schema);
                return;
            } catch (Exception e) {
                // Cases like testMultipleSchemas2 cause exception while deserializing
                // symbols. In that case, we get it again.
                LOG.warn("Exception while trying to deserialize schema in backend. "
                        + "Will construct again. schema= " + schema, e);
            }
        }
    }

    Configuration conf = job.getConfiguration();
    Set<Path> paths = AvroStorageUtils.getPaths(location, conf, true);
    if (paths.isEmpty()) {
        throw new IOException("Input path \'" + location + "\' is not found");
    }
    // Set top level directories in input format. Adding all files will
    // bloat configuration size
    FileInputFormat.setInputPaths(job, paths.toArray(new Path[paths.size()]));
    // Scan all directories including sub directories for schema
    if (inputAvroSchema == null) {
        setInputAvroSchema(paths, conf);
    }
}