Example usage for java.util LinkedHashMap keySet

List of usage examples for java.util LinkedHashMap keySet

Introduction

In this page you can find the example usage for java.util LinkedHashMap keySet.

Prototype

public Set<K> keySet() 

Source Link

Document

Returns a Set view of the keys contained in this map.

Usage

From source file:org.appcelerator.titanium.analytics.TiAnalyticsService.java

@Override
public void onStart(Intent intent, final int startId) {
    super.onStart(intent, startId);

    // Only one upload may be in flight at a time. If the flag was already
    // set, another invocation is still sending; bail out instead of
    // spawning a second sender thread. (BUG FIX: the original logged
    // "skipping intent" but fell through and started a duplicate sender.)
    if (!sending.compareAndSet(false, true)) {
        Log.i(TAG, "Send already in progress, skipping intent");
        return;
    }

    final TiAnalyticsService self = this;

    // Do the network work off the main thread.
    Thread t = new Thread(new Runnable() {

        public void run() {
            Log.i(TAG, "Analytics Service Started");
            try {

                if (connectivityManager == null) {
                    Log.w(TAG, "Connectivity manager not available.");
                    stopSelf(startId);
                    return;
                }
                TiAnalyticsModel model = new TiAnalyticsModel(self);
                if (!model.hasEvents()) {
                    Log.d(TAG, "No events to send.", Log.DEBUG_MODE);
                    stopSelf(startId);
                    return;
                }

                // Drain queued events bucket-by-bucket until none remain or
                // the network becomes unavailable.
                while (model.hasEvents()) {
                    if (canSend()) {
                        LinkedHashMap<Integer, JSONObject> events = model
                                .getEventsAsJSON(BUCKET_SIZE_FAST_NETWORK);

                        int len = events.size();
                        int[] eventIds = new int[len];
                        Iterator<Integer> keys = events.keySet().iterator();

                        JSONArray records = new JSONArray();
                        // build up data to send and records to delete on success
                        for (int i = 0; i < len; i++) {
                            int id = keys.next();
                            // ids are kept even on error JSON to prevent unrestrained growth
                            // and a queue blocked by bad records.
                            eventIds[i] = id;
                            records.put(events.get(id));

                            if (Log.isDebugModeEnabled()) {
                                JSONObject obj = events.get(id);
                                Log.d(TAG, "Sending event: type = " + obj.getString("type") + ", timestamp = "
                                        + obj.getString("ts"));
                            }
                        }
                        boolean deleteEvents = true;
                        if (records.length() > 0) {
                            if (Log.isDebugModeEnabled()) {
                                Log.d(TAG, "Sending " + records.length() + " analytics events.");
                            }
                            try {
                                String jsonData = records.toString() + "\n";
                                String postUrl = TiApplication.getInstance() == null ? ANALYTICS_URL
                                        : ANALYTICS_URL + TiApplication.getInstance().getAppGUID();

                                HttpPost httpPost = new HttpPost(postUrl);
                                StringEntity entity = new StringEntity(jsonData);
                                entity.setContentType("text/json");
                                httpPost.setEntity(entity);

                                HttpParams httpParams = new BasicHttpParams();
                                HttpConnectionParams.setConnectionTimeout(httpParams, 5000); //TODO use property
                                //HttpConnectionParams.setSoTimeout(httpParams, 15000); //TODO use property
                                HttpClient client = new DefaultHttpClient(httpParams);

                                ResponseHandler<String> responseHandler = new BasicResponseHandler();
                                client.getParams().setBooleanParameter("http.protocol.expect-continue", false);

                                @SuppressWarnings("unused")
                                String response = client.execute(httpPost, responseHandler);
                            } catch (Throwable t) {
                                // Posting failed: keep the events in the model for a later retry.
                                Log.e(TAG, "Error posting events: " + t.getMessage(), t);
                                deleteEvents = false;
                                records = null;
                                break;
                            }
                        }

                        records = null;

                        // Delete only the events that were successfully posted.
                        if (deleteEvents) {
                            model.deleteEvents(eventIds);
                        }

                        events.clear();
                    } else {
                        Log.w(TAG, "Network unavailable, can't send analytics");
                        //TODO reset alarm?
                        break;
                    }
                }

                Log.i(TAG, "Stopping Analytics Service");
                stopSelf(startId);
            } catch (Throwable t) {
                Log.e(TAG, "Unhandled exception in analytics thread: ", t);
                stopSelf(startId);
            } finally {
                // Always release the sending flag so future intents can run.
                if (!sending.compareAndSet(true, false)) {
                    Log.w(TAG, "Expected to be in a sending state. Sending was already false.", Log.DEBUG_MODE);
                }
            }
        }
    });
    t.setPriority(Thread.MIN_PRIORITY);
    t.start();
}

From source file:com.joyfulmongo.db.JFMongoCmdQuery.java

/**
 * Resolves "include" references: for every include key with a known pointer
 * collection, loads all referenced objects in one batched query and patches
 * each one back into the pointers of its parent objects.
 */
private void processIncludes(Map<String, LinkedHashMap<String, List<JFMongoObject>>> includeKeyToPointerListMap,
        Map<String, String> includeKeyToPointerColnameMap) {
    for (String includeKey : includeFields) {
        String collectionName = includeKeyToPointerColnameMap.get(includeKey);
        if (collectionName == null) {
            // No pointer collection recorded for this include key; nothing to resolve.
            continue;
        }

        LinkedHashMap<String, List<JFMongoObject>> parentsByRefId = includeKeyToPointerListMap.get(includeKey);
        Set<String> referencedIds = parentsByRefId.keySet();

        // Fetch every referenced object in a single "contained in" query.
        JFMongoCmdQuery.Builder builder = new JFMongoCmdQuery.Builder(collectionName);
        builder.whereContainedIn(Constants.Props.objectId.toString(), referencedIds);

        for (JFMongoObject referenced : builder.build().find()) {
            List<JFMongoObject> parents = parentsByRefId.get(referenced.getObjectId());
            for (JFMongoObject parent : parents) {
                // Replace each pointer on the parent with the fully loaded object.
                for (ContainerObjectPointer pointer : parent.getPointer(includeKey)) {
                    pointer.replaceObject(referenced);
                }
            }
        }
    }
}

From source file:org.egov.collection.integration.pgi.AxisAdaptor.java

/**
 * Appends the given fields to {@code buf} as a URL-encoded query string:
 * {@code name=value} pairs joined by {@code '&'}.
 *
 * @param buf    the initial URL being built, to which encoded fields are appended
 * @param fields the input parameters from the order page, in insertion order
 * @throws ApplicationRuntimeException if the encoding name is unsupported
 */
private void appendQueryFields(final StringBuilder buf, final LinkedHashMap<String, String> fields) {

    boolean first = true;
    // move through the map and create a series of URL key/value pairs
    for (final String fieldName : fields.keySet()) {
        final String fieldValue = fields.get(fieldName);

        // Skip parameters with no value; they contribute nothing to the query.
        if (fieldValue == null || fieldValue.length() == 0) {
            continue;
        }

        try {
            // BUG FIX: the original appended '&' whenever more entries
            // remained, even after a skipped empty field, which could emit
            // "&&" or a trailing separator. Separate only emitted pairs.
            if (!first) {
                buf.append('&');
            }
            buf.append(URLEncoder.encode(fieldName, UTF8));
            buf.append('=');
            buf.append(URLEncoder.encode(fieldValue, UTF8));
            first = false;
        } catch (final UnsupportedEncodingException e) {
            LOGGER.error("Error appending QueryFields" + e);
            throw new ApplicationRuntimeException(e.getMessage());
        }
    }
}

From source file:com.proofpoint.jmx.MBeanRepresentation.java

/**
 * Builds a serializable snapshot of the MBean registered under
 * {@code objectName}: its class name, description, descriptor, the current
 * values of its attributes, and its declared operations.
 *
 * @throws JMException if the MBean metadata or attribute values cannot be read
 */
public MBeanRepresentation(MBeanServer mbeanServer, ObjectName objectName, ObjectMapper objectMapper)
        throws JMException {
    this.objectName = objectName;

    MBeanInfo info = mbeanServer.getMBeanInfo(objectName);

    className = info.getClassName();
    description = info.getDescription();
    descriptor = toMap(info.getDescriptor());

    //
    // Attributes
    //
    // Index the attribute metadata by name, preserving declaration order.
    LinkedHashMap<String, MBeanAttributeInfo> infoByName = new LinkedHashMap<String, MBeanAttributeInfo>();
    for (MBeanAttributeInfo attributeInfo : info.getAttributes()) {
        infoByName.put(attributeInfo.getName(), attributeInfo);
    }

    String[] attributeNames = infoByName.keySet().toArray(new String[infoByName.size()]);
    ImmutableList.Builder<AttributeRepresentation> attributeBuilder = ImmutableList.builder();
    for (Attribute attribute : mbeanServer.getAttributes(objectName, attributeNames).asList()) {
        // remove() guarantees at most one representation per attribute name.
        MBeanAttributeInfo attributeInfo = infoByName.remove(attribute.getName());
        if (attributeInfo == null) {
            // unknown extra attribute, could have been added after MBeanInfo was fetched
            continue;
        }

        attributeBuilder.add(new AttributeRepresentation(attributeInfo, attribute.getValue(), objectMapper));
    }
    this.attributes = attributeBuilder.build();

    //
    // Operations
    //
    ImmutableList.Builder<OperationRepresentation> operationBuilder = ImmutableList.builder();
    for (MBeanOperationInfo operationInfo : info.getOperations()) {
        operationBuilder.add(new OperationRepresentation(operationInfo));
    }
    this.operations = operationBuilder.build();
}

From source file:com.compomics.pride_asa_pipeline.core.logic.enzyme.EnzymePredictor.java

/**
 * Heuristically picks the most plausible digestion enzyme from per-enzyme
 * hit counts, favouring the common enzymes (trypsin, chymotrypsin, pepsin,
 * Arg-C, Lys-C) through a series of count-ratio checks.
 *
 * @param enzymecounts map from enzyme to the number of matches it produced
 * @return the best-guess enzyme (also stored in {@code bestGuess})
 */
public Enzyme getMainEnzyme(LinkedHashMap<Enzyme, Integer> enzymecounts) {
    int chymoTrypsin;
    int argC;
    int lysC;
    int pepsin;
    int trypsin;
    //if none of these, just pick the highest ranking

    // Start from trypsin and keep whichever enzyme has the highest count.
    // NOTE(review): if "Trypsin" is absent from enzymecounts, unboxing
    // enzymecounts.get(highestCount) below throws a NullPointerException -
    // confirm the map always contains it.
    Enzyme highestCount = enzymeFactory.getEnzyme("Trypsin");
    for (Enzyme anEnzyme : enzymecounts.keySet()) {
        int bestGuessCount = enzymecounts.get(highestCount);
        if (enzymecounts.get(anEnzyme) > bestGuessCount) {
            highestCount = anEnzyme;
        }
    }

    // Missing enzymes yield null from get(); each NPE catch defaults that count to 0.
    try {
        trypsin = enzymecounts.get(enzymeFactory.getEnzyme("Trypsin"));
    } catch (NullPointerException e) {
        trypsin = 0;
    }

    try {
        chymoTrypsin = enzymecounts.get(enzymeFactory.getEnzyme("Chymotrypsin (FYWL)"));
    } catch (NullPointerException e) {
        chymoTrypsin = 0;
    }
    try {
        pepsin = enzymecounts.get(enzymeFactory.getEnzyme("Pepsin A"));
    } catch (NullPointerException e) {
        pepsin = 0;
    }

    // "Trypsin + ..." combination winners fall back to plain trypsin unless
    // the chymotrypsin evidence is at least half the trypsin count.
    if (highestCount.getName().toUpperCase().contains("TRYPSIN +")
            && (double) chymoTrypsin < (double) (0.5 * trypsin)) {
        highestCount = enzymeFactory.getEnzyme("Trypsin");
    }

    //check if it's not chymotrypsin or pepsin...
    if (highestCount.getName().toUpperCase().contains("CHYMOTRYP")) {
        // NOTE(review): these divisions produce Infinity/NaN when a
        // denominator count is 0; the comparisons then silently fail.
        double trypsinToChymoTrypsin = (double) ((double) trypsin / (double) chymoTrypsin);
        double trypsinToPepsin = (double) ((double) trypsin / (double) pepsin);
        double pepsinToChymoTrypsin = (double) ((double) pepsin / (double) chymoTrypsin);
        if (trypsinToChymoTrypsin <= 0.5 && pepsinToChymoTrypsin < 0.8) {
            highestCount = enzymeFactory.getEnzyme("Chymotrypsin (FYWL)");
        } else if (trypsinToPepsin < 0.5) {
            highestCount = enzymeFactory.getEnzyme("Pepsin A");
        }
    } else if (highestCount.getName().toLowerCase().contains("pepsin")) {
        try {
            Enzyme chymoTrypsinEnzyme = enzymeFactory.getEnzyme("Chymotrypsin (FYWL)");
            chymoTrypsin = enzymecounts.get(chymoTrypsinEnzyme);
        } catch (NullPointerException e) {
            chymoTrypsin = 0;
        }
        //AT THIS POINT, IT IS NOT TRYPSIN, or C 
        // NOTE(review): integer division - (pepsin / chymoTrypsin) >= 0.9 is
        // equivalent to pepsin >= chymoTrypsin, and it throws
        // ArithmeticException when chymoTrypsin == 0. Likely intended as a
        // floating-point ratio - confirm before changing.
        if ((pepsin / chymoTrypsin) >= 0.9) {
            return highestCount;
        }
        highestCount = enzymeFactory.getEnzyme("Chymotrypsin (FYWL)");
    }

    //ELSE IT COULD VERY WELL BE TRYPSIN
    if (highestCount.getName().equalsIgnoreCase("trypsin")) {
        // NOTE(review): despite its name this computes pepsin/trypsin, not
        // trypsin/pepsin - verify which ratio the 0.5 threshold below expects.
        double trypsinToPepsin = (double) ((double) pepsin / (double) trypsin);

        try {
            argC = enzymecounts.get(enzymeFactory.getEnzyme("Arg-C"));
        } catch (NullPointerException e) {
            argC = 0;
        }
        try {
            lysC = enzymecounts.get(enzymeFactory.getEnzyme("Lys-C"));
        } catch (NullPointerException e) {
            lysC = 0;
        }
        //check if arg-c or lys-c
        // A count within ~10% of trypsin's suggests Arg-C/Lys-C specificity.
        double argToTryps = (double) (1 - ((double) argC / (double) trypsin));
        double lysToTryps = (double) (1 - ((double) lysC / (double) trypsin));
        if (trypsinToPepsin > 0.5) {
            highestCount = enzymeFactory.getEnzyme("Pepsin A");
        } else if (-0.1 < argToTryps && argToTryps < 0.1) {
            highestCount = enzymeFactory.getEnzyme("Arg-C");
        } else if (-0.1 < lysToTryps && lysToTryps < 0.1) {
            highestCount = enzymeFactory.getEnzyme("Lys-C");
        } else {
            highestCount = enzymeFactory.getEnzyme("Trypsin");
        }
    }

    bestGuess = highestCount;
    // Debug dump of every candidate's count.
    for (Enzyme anEnzyme : enzymecounts.keySet()) {
        System.out.println(anEnzyme.getName() + ":" + enzymecounts.get(anEnzyme));
    }
    return highestCount;
}

From source file:com.itemanalysis.psychometrics.measurement.TestSummary.java

/**
 * Builds a test summary over the given items and subscales.
 *
 * @param numberOfItems        total number of items on the test
 * @param numberOfSubscales    number of subscales; a part-test covariance
 *                             matrix is allocated only when greater than 1
 * @param cutScores            cut scores used by downstream statistics
 * @param variableAttributeMap item attributes keyed by variable name, in order
 * @param unbiased             whether to use unbiased (n-1) variance estimates
 * @param deletedReliability   whether to compute item-deleted reliability
 * @param showCsem             whether to report conditional SEM
 */
public TestSummary(int numberOfItems, int numberOfSubscales, int[] cutScores,
        LinkedHashMap<VariableName, VariableAttributes> variableAttributeMap, boolean unbiased,
        boolean deletedReliability, boolean showCsem) {

    // Copy the attribute values in the map's insertion order.
    this.variableAttributes = new ArrayList<VariableAttributes>();
    for (VariableAttributes attributes : variableAttributeMap.values()) {
        this.variableAttributes.add(attributes);
    }

    this.unbiased = unbiased;
    this.numberOfItems = numberOfItems;
    this.cutScores = cutScores;
    this.deletedReliability = deletedReliability;
    this.showCsem = showCsem;
    this.numberOfSubscales = numberOfSubscales;

    stats = new DescriptiveStatistics();
    stdDev = new StandardDeviation(unbiased);
    relMatrix = new CovarianceMatrix(variableAttributes);
    if (numberOfSubscales > 1) {
        partRelMatrix = new CovarianceMatrix(numberOfSubscales);
    }
}

From source file:com.tacitknowledge.util.migration.DistributedMigrationProcess.java

/**
 * Applies the necessary rollbacks to the system. Overrides the base
 * implementation so that each task is rolled back through the launcher
 * (and every context) it was originally loaded from.
 *
 * @param currentPatchInfoStore the store describing the currently applied patch level
 * @param rollbackLevels        the level that the system should rollback to
 * @param context               information and resources that are available to the migration tasks
 * @param forceRollback         when true, roll back even if some tasks are not rollbackable
 * @return the number of <code>RollbackableMigrationTasks</code> which have been rolled back
 * @throws MigrationException if a rollback fails
 */
public final int doRollbacks(final PatchInfoStore currentPatchInfoStore, final int[] rollbackLevels,
        final MigrationContext context, boolean forceRollback) throws MigrationException {
    log.debug("Starting doRollbacks");
    // get all of the allTasks, with launchers, then get the list of just
    // allTasks
    LinkedHashMap rollbacksWithLaunchers = getMigrationTasksWithLaunchers();
    List allTasks = new ArrayList();
    allTasks.addAll(rollbacksWithLaunchers.keySet());

    // Select the tasks that fall within the requested rollback levels.
    List<MigrationTask> rollbackCandidates = getMigrationRunnerStrategy().getRollbackCandidates(allTasks,
            rollbackLevels, currentPatchInfoStore);

    validateControlledSystems(currentPatchInfoStore);
    rollbackDryRun(rollbackCandidates, rollbacksWithLaunchers);

    if (rollbackCandidates.size() > 0) {
        log.info("A total of " + rollbackCandidates.size() + " rollback patch tasks will execute.");
    } else {
        log.info("System up-to-date.  No patch tasks will rollback.");
    }

    // Proceed only when every candidate is rollbackable, unless forced.
    if (isPatchSetRollbackable(rollbackCandidates) || forceRollback) {
        if (isReadOnly()) {
            throw new MigrationException("Unapplied rollbacks exist, but read-only flag is set");
        }

        for (Iterator rollbackIterator = rollbackCandidates.iterator(); rollbackIterator.hasNext();) {
            RollbackableMigrationTask task = (RollbackableMigrationTask) rollbackIterator.next();
            // Execute the task in the context it was loaded from
            JdbcMigrationLauncher launcher = (JdbcMigrationLauncher) rollbacksWithLaunchers.get(task);

            // iterate through all the contexts
            for (Iterator j = launcher.getContexts().keySet().iterator(); j.hasNext();) {
                MigrationContext launcherContext = (MigrationContext) j.next();
                applyRollback(launcherContext, task, true);
            }
        }

    } else {
        log.info("Could not complete rollback because one or more of the tasks is not rollbackable.");
    }

    // Whatever is still a rollback candidate afterwards was not applied.
    List<MigrationTask> rollbacksNotApplied = getMigrationRunnerStrategy()
            .getRollbackCandidates(rollbackCandidates, rollbackLevels, currentPatchInfoStore);

    if (rollbacksNotApplied.isEmpty()) {
        log.info("Rollback complete (" + rollbackCandidates.size() + " patch tasks rolledback)");
    } else {
        log.info("The system could not rollback the tasks");
    }
    return rollbackCandidates.size() - rollbacksNotApplied.size();
}

From source file:gr.iit.demokritos.cru.cps.ai.ComputationalCreativityMetrics.java

/**
 * Computes the minimum-closure weight of a phrase: walking the phrase's
 * words in order of their position in the story, each word contributes its
 * smallest distance (capped at 1.0) to any previously processed word, and
 * those minima are summed.
 *
 * @param phrase whitespace-separated words to score
 * @param story  text the words are located in; when empty, the phrase itself is used
 * @return the accumulated closure weight
 */
public double MinClosure(String phrase, String story) {
    // When not invoked via ComputeRar_Eff there is no separate story.
    if (story.equalsIgnoreCase("")) {
        story = phrase;
    }

    // Map each word of the phrase to its character index within the story.
    HashMap<String, Double> termIndex = new HashMap<String, Double>();
    for (String word : phrase.split(" ")) {
        termIndex.put(word, 1.0 * story.indexOf(word));
    }

    // Sort descending by index, so the LAST key is the word appearing first.
    LinkedHashMap<String, Double> sorted = inf.sortHashMapByValues(termIndex);
    ArrayList<String> ordered = new ArrayList<String>(sorted.keySet());

    // Seed the graph with the phrase's first word (implicit distance 0).
    HashMap<String, Double> graph = new HashMap<String, Double>();
    graph.put(ordered.get(ordered.size() - 1), 0.0);

    double closure = 0.0;
    // Traverse from the first word in the phrase to the last.
    for (int i = ordered.size() - 1; i >= 0; i--) {
        String word = ordered.get(i);
        // Shortest edge from this word to any already-settled word, capped at 1.0.
        double min = 1.0;
        for (String settled : graph.keySet()) {
            double dist = getDistance(word, settled);
            if (dist < min) {
                min = dist;
            }
        }
        graph.put(word, min);
        // keep the overall sum of weights of the chosen edges
        closure += min;
    }
    return closure;
}

From source file:com.px100systems.data.browser.controller.MainController.java

/**
 * Populates the model for the main data-browser view: tenant and entity
 * lists, an echo of the search controls, and - when an entity is selected -
 * up to MAX_ROWS matching rows rendered through the optional SpEL field
 * template.
 */
@SuppressWarnings("unchecked")
private void browse(ModelAndView mav, String entityName, Integer tenantId, String filter, Integer orderBy,
        Integer order, String fields) {
    mav.setViewName("main");
    mav.addObject("cluster", clusterName);

    // Default to the first tenant when none was chosen.
    LinkedHashMap<Integer, String> tenants = dataStorage.getTenants();
    mav.addObject("tenants", tenants);
    mav.addObject("tenantId", tenantId != null ? tenantId : tenants.keySet().iterator().next());

    // Default to the first entity when none was chosen.
    Set<String> entities = dataStorage.entityNames();
    mav.addObject("entities", entities);
    mav.addObject("entityName", entityName != null ? entityName : entities.iterator().next());

    // Escape double quotes so the echoed values can sit in form attributes.
    mav.addObject("filter", filter == null ? "" : filter.replace("\"", "&quot;"));
    mav.addObject("orderBy", orderBy);
    mav.addObject("order", order);
    mav.addObject("fields", fields == null ? "" : fields.replace("\"", "&quot;"));

    // Rows are keyed by stored-bean id, in query result order.
    LinkedHashMap<Long, String> result = new LinkedHashMap<Long, String>();
    mav.addObject("result", result);

    if (entityName != null)
        try {
            Class entityClass = dataStorage.entityClass(entityName);

            // Render each row through the user-supplied SpEL template,
            // or via toString() when no fields template was given.
            String fieldsEL = fields != null && !fields.trim().isEmpty() ? fields.trim()
                    : "${#root.toString()}";
            Expression expression = new SpelExpressionParser().parseExpression(fieldsEL,
                    new ELTemplateParserContext());

            Transaction tx = dataStorage.transaction(tenantId);
            Criteria criteria = parseCriteria(filter);

            for (Object row : tx.find(entityClass, criteria,
                    Collections.singletonList(parseOrderBy(orderBy == 2, order == 2)), MAX_ROWS)) {
                result.put(((StoredBean) row).getId(),
                        expression.getValue(new SpringELCtx(row), String.class).replace("\"", "&quot;"));
            }
        } catch (Exception e) {
            // Surface lookup/parse/query failures on the page instead of failing the request.
            mav.addObject("error", e.getMessage());
        }
}

From source file:eu.hydrologis.jgrass.charting.impl.JGrassXYTimeLineChart.java

/**
 * A line chart creator for series made up of two values per row. Several
 * mutually independent series are supported.
 *
 * @param chartValues a map whose keys are series names and whose values are
 *        the double[][] data; the x values are assumed to be dates.
 *        Important: each matrix is passed as two rows (row 0 = times,
 *        row 1 = values), not as two columns.
 * @param timeClass the RegularTimePeriod subclass used to bin the dates
 */
public JGrassXYTimeLineChart(LinkedHashMap<String, double[][]> chartValues,
        Class<RegularTimePeriod> timeClass) {
    try {
        chartSeries = new TimeSeries[chartValues.size()];

        constructor = timeClass.getConstructor(Date.class);

        int seriesIndex = 0;
        for (String seriesName : chartValues.keySet()) {
            double[][] data = chartValues.get(seriesName);

            chartSeries[seriesIndex] = new TimeSeries(seriesName, timeClass);
            // Row 0 holds the timestamps, row 1 the values.
            for (int col = 0; col < data[0].length; col++) {
                double value = data[1][col];
                if (isNovalue(value)) {
                    // skip missing measurements entirely
                    continue;
                }
                chartSeries[seriesIndex].add(constructor.newInstance(new Date((long) data[0][col])), value);
            }
            seriesIndex++;
        }

        // Collect every series into a single time-series dataset.
        lineDataset = new TimeSeriesCollection();
        for (TimeSeries series : chartSeries) {
            lineDataset.addSeries(series);
        }
        lineDataset.setXPosition(TimePeriodAnchor.MIDDLE);
    } catch (Exception e) {
        ChartPlugin.log("ChartPlugin problem", e); //$NON-NLS-1$
    }
}