Example usage for java.util HashMap remove

List of usage examples for java.util HashMap remove

Introduction

On this page you can find usage examples for java.util.HashMap.remove.

Prototype

public V remove(Object key) 

Document

Removes the mapping for the specified key from this map if present.
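
Before the project excerpts below, a minimal self-contained sketch of the call itself (the map contents are invented for illustration): remove returns the value previously associated with the key, or null if the key was not present (null is also returned when the key was explicitly mapped to null).

import java.util.HashMap;

public class HashMapRemoveExample {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<String, Integer>();
        counts.put("apples", 3);
        counts.put("oranges", 5);

        // remove returns the previous value, or null if the key was absent
        Integer removed = counts.remove("apples"); // 3
        Integer missing = counts.remove("pears");  // null

        System.out.println(removed); // 3
        System.out.println(missing); // null
        System.out.println(counts);  // {oranges=5}
    }
}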

Usage

From source file:org.jasig.portlet.calendar.mvc.controller.CalendarController.java

@RequestMapping
public ModelAndView getCalendar(@RequestParam(required = false, value = "interval") String intervalString,
        RenderRequest request) {

    PortletSession session = request.getPortletSession(true);

    PortletPreferences prefs = request.getPreferences();

    Map<String, Object> model = new HashMap<String, Object>();

    // get the list of hidden calendars
    @SuppressWarnings("unchecked")
    HashMap<Long, String> hiddenCalendars = (HashMap<Long, String>) session.getAttribute("hiddenCalendars");

    // indicate if the current user is a guest (unauthenticated) user
    model.put("guest", request.getRemoteUser() == null);

    /**
     * Add and remove calendars from the hidden list.  Hidden calendars
     * will be fetched, but rendered invisible in the view.
     */

    // check the request parameters to see if we need to add any
    // calendars to the list of hidden calendars
    String hideCalendar = request.getParameter("hideCalendar");
    if (hideCalendar != null) {
        hiddenCalendars.put(Long.valueOf(hideCalendar), "true");
        session.setAttribute("hiddenCalendars", hiddenCalendars);
    }

    // check the request parameters to see if we need to remove
    // any calendars from the list of hidden calendars
    String showCalendar = request.getParameter("showCalendar");
    if (showCalendar != null) {
        hiddenCalendars.remove(Long.valueOf(showCalendar));
        session.setAttribute("hiddenCalendars", hiddenCalendars);
    }

    // See if we're configured to show or hide the jQueryUI DatePicker.
    // By default, we assume we are to show the DatePicker because that's
    // the classic behavior.
    String showDatePicker = prefs.getValue("showDatePicker", "true");
    model.put("showDatePicker", showDatePicker);

    /**
     * Find our desired starting and ending dates.
     */

    Interval interval = null;

    if (!StringUtils.isEmpty(intervalString)) {
        interval = Interval.parse(intervalString);
        model.put("startDate", new DateMidnight(interval.getStart()).toDate());
        model.put("days", interval.toDuration().getStandardDays());
        model.put("endDate", new DateMidnight(interval.getEnd()));
    } else {
        //StartDate can only be changed via an AJAX request
        DateMidnight startDate = (DateMidnight) session.getAttribute("startDate");
        log.debug("startDate from session is: " + startDate);
        model.put("startDate", startDate.toDate());

        // find how many days into the future we should display events
        int days = (Integer) session.getAttribute("days");
        model.put("days", days);

        // set the end date based on our desired time period
        DateMidnight endDate = startDate.plusDays(days);
        model.put("endDate", endDate.toDate());

        interval = new Interval(startDate, endDate);
    }

    // define "today" and "tomorrow" so we can display these specially in the
    // user interface
    // get the user's configured time zone
    String timezone = (String) session.getAttribute("timezone");
    DateMidnight today = new DateMidnight(DateTimeZone.forID(timezone));
    model.put("today", today.toDate());
    model.put("tomorrow", today.plusDays(1).toDate());

    /**
     * retrieve the calendars defined for this portlet instance
     */

    CalendarSet<?> set = calendarSetDao.getCalendarSet(request);
    List<CalendarConfiguration> calendars = new ArrayList<CalendarConfiguration>();
    calendars.addAll(set.getConfigurations());
    Collections.sort(calendars, new CalendarConfigurationByNameComparator());
    model.put("calendars", calendars);

    Map<Long, Integer> colors = new HashMap<Long, Integer>();
    Map<Long, String> links = new HashMap<Long, String>();
    int index = 0;
    for (CalendarConfiguration callisting : calendars) {

        // don't bother to fetch hidden calendars
        if (hiddenCalendars.get(callisting.getId()) == null) {

            try {

                // get an instance of the adapter for this calendar
                ICalendarAdapter adapter = (ICalendarAdapter) applicationContext
                        .getBean(callisting.getCalendarDefinition().getClassName());

                //get hyperlink to calendar
                String link = adapter.getLink(callisting, interval, request);
                if (link != null) {
                    links.put(callisting.getId(), link);
                }

            } catch (NoSuchBeanDefinitionException ex) {
                log.error("Calendar class instance could not be found: " + ex.getMessage());
            } catch (CalendarLinkException linkEx) {
                // Not an error. Ignore
            } catch (Exception ex) {
                log.error(ex);
            }
        }

        // add this calendar's id to the color map
        colors.put(callisting.getId(), index);
        index++;

    }

    model.put("timezone", session.getAttribute("timezone"));
    model.put("colors", colors);
    model.put("links", links);
    model.put("hiddenCalendars", hiddenCalendars);

    /*
     * Check if we need to disable either the preferences and/or administration links
     */

    Boolean disablePrefs = Boolean.valueOf(prefs.getValue(PREFERENCE_DISABLE_PREFERENCES, "false"));
    model.put(PREFERENCE_DISABLE_PREFERENCES, disablePrefs);
    Boolean disableAdmin = Boolean.valueOf(prefs.getValue(PREFERENCE_DISABLE_ADMINISTRATION, "false"));
    model.put(PREFERENCE_DISABLE_ADMINISTRATION, disableAdmin);

    return new ModelAndView(viewSelector.getCalendarViewName(request), "model", model);
}
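
A side note on the example above: within this method the hiddenCalendars map only ever stores the constant value "true" and is only consulted via get(id) == null, so it effectively acts as a set of hidden calendar ids (the view receiving the model may of course use it differently). A hypothetical sketch of the same hide/show toggle expressed with a HashSet; the class name and ids below are invented and not part of the portlet:

import java.util.HashSet;
import java.util.Set;

public class HiddenCalendarsSketch {
    public static void main(String[] args) {
        Set<Long> hiddenCalendars = new HashSet<Long>();

        // equivalent of hideCalendar=42
        hiddenCalendars.add(42L);
        // equivalent of showCalendar=42
        hiddenCalendars.remove(42L);

        // the visibility check becomes contains(id) instead of get(id) == null
        System.out.println(hiddenCalendars.contains(42L)); // false
    }
}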

From source file:org.cesecore.certificates.ca.CaSessionBean.java

/**
 * Extract keystore or keystore reference and store it as a CryptoToken. Add a reference to the keystore.
 * @return true if any changes were made
 */
@SuppressWarnings("unchecked")
@Deprecated // Remove when we no longer need to support upgrades from 5.0.x
private boolean adhocUpgradeFrom50(int caid, LinkedHashMap<Object, Object> data, String caName) {
    HashMap<String, String> tokendata = (HashMap<String, String>) data.get(CA.CATOKENDATA);
    if (tokendata.get(CAToken.CRYPTOTOKENID) != null) {
        // Already upgraded
        if (!CesecoreConfiguration.isKeepInternalCAKeystores()) {
            // All nodes in the cluster have been upgraded so we can remove any internal CA keystore now
            if (tokendata.get(CAToken.KEYSTORE) != null) {
                tokendata.remove(CAToken.KEYSTORE);
                tokendata.remove(CAToken.CLASSPATH);
                log.info("Removed duplicate of upgraded CA's internal keystore for CA '" + caName
                        + "' with id: " + caid);
                return true;
            }
        } else {
            if (log.isDebugEnabled()) {
                log.debug("CA '" + caName
                        + "' already has cryptoTokenId and will not have it's token split of to a different db table because db.keepinternalcakeystores=true: "
                        + caid);
            }
        }
        return false;
    }
    // Perform pre-upgrade of CATokenData to correct classpath changes (org.ejbca.core.model.ca.catoken.SoftCAToken)
    tokendata = (LinkedHashMap<String, String>) new CAToken(tokendata).saveData();
    data.put(CA.CATOKENDATA, tokendata);
    log.info("Pulling CryptoToken out of CA '" + caName + "' with id " + caid
            + " into a separate database table.");
    final String str = (String) tokendata.get(CAToken.KEYSTORE);
    byte[] keyStoreData = null;
    if (StringUtils.isNotEmpty(str)) {
        keyStoreData = Base64.decode(str.getBytes());
    }
    String propertyStr = (String) tokendata.get(CAToken.PROPERTYDATA);
    final Properties prop = new Properties();
    if (StringUtils.isNotEmpty(propertyStr)) {
        try {
            // If the input string contains \ (backslash on windows) we must convert it to \\
            // Otherwise properties.load will parse it as an escaped character, and that is not good
            propertyStr = StringUtils.replace(propertyStr, "\\", "\\\\");
            prop.load(new ByteArrayInputStream(propertyStr.getBytes()));
        } catch (IOException e) {
            log.error("Error getting CA token properties: ", e);
        }
    }
    final String classpath = (String) tokendata.get(CAToken.CLASSPATH);
    if (log.isDebugEnabled()) {
        log.debug("CA token classpath: " + classpath);
    }
    // Upgrade the properties value
    final Properties upgradedProperties = PKCS11CryptoToken.upgradePropertiesFileFrom5_0_x(prop);
    // If it is an P11 we are using and the library and slot are the same as an existing CryptoToken we use that CryptoToken's id.
    int cryptoTokenId = 0;
    if (PKCS11CryptoToken.class.getName().equals(classpath)) {
        if (upgradedProperties.getProperty(PKCS11CryptoToken.SLOT_LABEL_TYPE) == null) {
            log.error("Upgrade of CA '" + caName
                    + "' failed due to failed upgrade of PKCS#11 CA token properties.");
            return false;
        }
        for (final Integer currentCryptoTokenId : cryptoTokenSession.getCryptoTokenIds()) {
            final CryptoToken cryptoToken = cryptoTokenSession.getCryptoToken(currentCryptoTokenId.intValue());
            final Properties cryptoTokenProperties = cryptoToken.getProperties();
            if (StringUtils.equals(upgradedProperties.getProperty(PKCS11CryptoToken.SHLIB_LABEL_KEY),
                    cryptoTokenProperties.getProperty(PKCS11CryptoToken.SHLIB_LABEL_KEY))
                    && StringUtils.equals(upgradedProperties.getProperty(PKCS11CryptoToken.ATTRIB_LABEL_KEY),
                            cryptoTokenProperties.getProperty(PKCS11CryptoToken.ATTRIB_LABEL_KEY))
                    && StringUtils.equals(upgradedProperties.getProperty(PKCS11CryptoToken.SLOT_LABEL_VALUE),
                            cryptoTokenProperties.getProperty(PKCS11CryptoToken.SLOT_LABEL_VALUE))
                    && StringUtils.equals(upgradedProperties.getProperty(PKCS11CryptoToken.SLOT_LABEL_TYPE),
                            cryptoTokenProperties.getProperty(PKCS11CryptoToken.SLOT_LABEL_TYPE))) {
                // The current CryptoToken points to the same HSM slot in the same way... re-use this id!
                cryptoTokenId = currentCryptoTokenId.intValue();
                break;
            }
        }
    }
    if (cryptoTokenId == 0) {
        final String cryptoTokenName = "Upgraded CA CryptoToken for " + caName;
        try {
            cryptoTokenId = cryptoTokenSession.mergeCryptoToken(CryptoTokenFactory.createCryptoToken(classpath,
                    upgradedProperties, keyStoreData, caid, cryptoTokenName));
        } catch (CryptoTokenNameInUseException e) {
            final String msg = "Crypto token name already in use upgrading (adhocUpgradeFrom50) crypto token for CA '"
                    + caName + "', cryptoTokenName '" + cryptoTokenName + "'.";
            log.info(msg, e);
            throw new RuntimeException(msg, e); // Since we have a constraint on CA names to be unique, this should never happen
        } catch (NoSuchSlotException e) {
            final String msg = "Slot as defined by "
                    + upgradedProperties.getProperty(PKCS11CryptoToken.SLOT_LABEL_VALUE) + " for CA '" + caName
                    + "' could not be found.";
            log.error(msg, e);
            throw new RuntimeException(msg, e);
        }
    }
    // Mark this CA as upgraded by setting a reference to the CryptoToken if the merge was successful
    tokendata.put(CAToken.CRYPTOTOKENID, String.valueOf(cryptoTokenId));
    // Note: We did not remove the keystore in the CA properties here, so old versions running in parallel will still work
    log.info("CA '" + caName + "' with id " + caid + " is now using CryptoToken with cryptoTokenId "
            + cryptoTokenId);
    return true;
}

From source file:com.ibm.bi.dml.hops.ipa.InterProceduralAnalysis.java

/**
 * Removes binary operations against constant matrices of ones (e.g., X * ones)
 * from the top-level program.
 * 
 * @param dmlp the DML program to rewrite
 * @throws HopsException 
 */
private void removeConstantBinaryOps(DMLProgram dmlp) throws HopsException {
    //approach: scan over top-level program (guaranteed to be unconditional),
    //collect ones=matrix(1,...); remove b(*)ones if not outer operation      
    HashMap<String, Hop> mOnes = new HashMap<String, Hop>();

    for (StatementBlock sb : dmlp.getStatementBlocks()) {
        //pruning updated variables
        for (String var : sb.variablesUpdated().getVariableNames())
            if (mOnes.containsKey(var))
                mOnes.remove(var);

        //replace constant binary ops
        if (!mOnes.isEmpty())
            rRemoveConstantBinaryOp(sb, mOnes);

        //collect matrices of ones from last-level statement blocks
        if (!(sb instanceof IfStatementBlock || sb instanceof WhileStatementBlock
                || sb instanceof ForStatementBlock)) {
            collectMatrixOfOnes(sb.get_hops(), mOnes);
        }
    }
}

From source file:org.apache.sysml.hops.codegen.SpoofCompiler.java

/**
 * Cleanup generated cplans in order to remove unnecessary inputs created
 * during incremental construction. This is important as it avoids unnecessary 
 * redundant computation.
 * 
 * @param memo memoization table
 * @param cplans set of cplans
 */
private static HashMap<Long, Pair<Hop[], CNodeTpl>> cleanupCPlans(CPlanMemoTable memo,
        HashMap<Long, Pair<Hop[], CNodeTpl>> cplans) {
    HashMap<Long, Pair<Hop[], CNodeTpl>> cplans2 = new HashMap<>();
    CPlanOpRewriter rewriter = new CPlanOpRewriter();
    CPlanCSERewriter cse = new CPlanCSERewriter();

    for (Entry<Long, Pair<Hop[], CNodeTpl>> e : cplans.entrySet()) {
        CNodeTpl tpl = e.getValue().getValue();
        Hop[] inHops = e.getValue().getKey();

        //remove invalid plans with null, empty, or all scalar inputs 
        if (inHops == null || inHops.length == 0 || Arrays.stream(inHops).anyMatch(h -> (h == null))
                || Arrays.stream(inHops).allMatch(h -> h.isScalar()))
            continue;

        //perform simplifications and cse rewrites
        tpl = rewriter.simplifyCPlan(tpl);
        tpl = cse.eliminateCommonSubexpressions(tpl);

        //update input hops (order-preserving)
        HashSet<Long> inputHopIDs = tpl.getInputHopIDs(false);
        inHops = Arrays.stream(inHops).filter(p -> p != null && inputHopIDs.contains(p.getHopID()))
                .toArray(Hop[]::new);
        cplans2.put(e.getKey(), new Pair<>(inHops, tpl));

        //remove invalid plans with column indexing on main input
        if (tpl instanceof CNodeCell || tpl instanceof CNodeRow) {
            CNodeData in1 = (CNodeData) tpl.getInput().get(0);
            boolean inclRC1 = !(tpl instanceof CNodeRow);
            if (rHasLookupRC1(tpl.getOutput(), in1, inclRC1) || isLookupRC1(tpl.getOutput(), in1, inclRC1)) {
                cplans2.remove(e.getKey());
                if (LOG.isTraceEnabled())
                    LOG.trace("Removed cplan due to invalid rc1 indexing on main input.");
            }
        } else if (tpl instanceof CNodeMultiAgg) {
            CNodeData in1 = (CNodeData) tpl.getInput().get(0);
            for (CNode output : ((CNodeMultiAgg) tpl).getOutputs())
                if (rHasLookupRC1(output, in1, true) || isLookupRC1(output, in1, true)) {
                    cplans2.remove(e.getKey());
                    if (LOG.isTraceEnabled())
                        LOG.trace("Removed cplan due to invalid rc1 indexing on main input.");
                }
        }

        //remove invalid lookups on main input (all templates)
        CNodeData in1 = (CNodeData) tpl.getInput().get(0);
        if (tpl instanceof CNodeMultiAgg)
            rFindAndRemoveLookupMultiAgg((CNodeMultiAgg) tpl, in1);
        else
            rFindAndRemoveLookup(tpl.getOutput(), in1, !(tpl instanceof CNodeRow));

        //remove invalid row templates (e.g., unsatisfied blocksize constraint)
        if (tpl instanceof CNodeRow) {
            //check for invalid row cplan over column vector
            if (((CNodeRow) tpl).getRowType() == RowType.NO_AGG && tpl.getOutput().getDataType().isScalar()) {
                cplans2.remove(e.getKey());
                if (LOG.isTraceEnabled())
                    LOG.trace("Removed invalid row cplan w/o agg on column vector.");
            } else if (OptimizerUtils.isSparkExecutionMode()) {
                Hop hop = memo.getHopRefs().get(e.getKey());
                boolean isSpark = DMLScript.rtplatform == RUNTIME_PLATFORM.SPARK || OptimizerUtils
                        .getTotalMemEstimate(inHops, hop, true) > OptimizerUtils.getLocalMemBudget();
                boolean invalidNcol = hop.getDataType().isMatrix()
                        && (HopRewriteUtils.isTransposeOperation(hop) ? hop.getDim1() > hop.getRowsInBlock()
                                : hop.getDim2() > hop.getColsInBlock());
                for (Hop in : inHops)
                    invalidNcol |= (in.getDataType().isMatrix() && in.getDim2() > in.getColsInBlock());
                if (isSpark && invalidNcol) {
                    cplans2.remove(e.getKey());
                    if (LOG.isTraceEnabled())
                        LOG.trace("Removed invalid row cplan w/ ncol>ncolpb.");
                }
            }
        }

        //remove cplan w/ single op and w/o agg
        if ((tpl instanceof CNodeCell && ((CNodeCell) tpl).getCellType() == CellType.NO_AGG
                && TemplateUtils.hasSingleOperation(tpl))
                || (tpl instanceof CNodeRow
                        && (((CNodeRow) tpl).getRowType() == RowType.NO_AGG
                                || ((CNodeRow) tpl).getRowType() == RowType.NO_AGG_B1
                                || ((CNodeRow) tpl).getRowType() == RowType.ROW_AGG)
                        && TemplateUtils.hasSingleOperation(tpl))
                || TemplateUtils.hasNoOperation(tpl)) {
            cplans2.remove(e.getKey());
            if (LOG.isTraceEnabled())
                LOG.trace("Removed cplan with single operation.");
        }

        //remove cplan if empty
        if (tpl.getOutput() instanceof CNodeData) {
            cplans2.remove(e.getKey());
            if (LOG.isTraceEnabled())
                LOG.trace("Removed empty cplan.");
        }

        //rename inputs (for codegen and plan caching)
        tpl.renameInputs();
    }

    return cplans2;
}

From source file:org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor.java

/**
 * Remove stale storages from storageMap. We must not remove any storages
 * as long as they have associated block replicas.
 */
private void pruneStorageMap(final StorageReport[] reports) {
    synchronized (storageMap) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Number of storages reported in heartbeat=" + reports.length
                    + "; Number of storages in storageMap=" + storageMap.size());
        }

        HashMap<String, DatanodeStorageInfo> excessStorages;

        // Init excessStorages with all known storages.
        excessStorages = new HashMap<String, DatanodeStorageInfo>(storageMap);

        // Remove storages that the DN reported in the heartbeat.
        for (final StorageReport report : reports) {
            excessStorages.remove(report.getStorage().getStorageID());
        }

        // For each remaining storage, remove it if there are no associated
        // blocks.
        for (final DatanodeStorageInfo storageInfo : excessStorages.values()) {
            try {
                if (storageInfo.numBlocks() == 0) {
                    storageMap.remove(storageInfo.getStorageID());
                    LOG.info("Removed storage " + storageInfo + " from DataNode" + this);
                } else if (LOG.isDebugEnabled()) {
                    // This can occur until all block reports are received.
                    LOG.debug("Deferring removal of stale storage " + storageInfo + " with "
                            + storageInfo.numBlocks() + " blocks");
                }
            } catch (IOException e) {
                // Skip for a bit
                LOG.warn(e, e);
            }
        }
    }
}
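
The method above is an instance of a common copy-then-remove idiom: start from a copy of everything known, remove each entry that was reported, and whatever is left over is stale. A minimal self-contained sketch of that idiom, with invented storage ids:

import java.util.HashMap;
import java.util.Map;

public class StaleEntriesSketch {
    public static void main(String[] args) {
        Map<String, String> storageMap = new HashMap<String, String>();
        storageMap.put("storage-1", "info-1");
        storageMap.put("storage-2", "info-2");

        String[] reported = { "storage-1" };

        // copy everything we know, then remove whatever was reported
        Map<String, String> excess = new HashMap<String, String>(storageMap);
        for (String id : reported) {
            excess.remove(id);
        }

        // what remains was not reported and is a candidate for pruning
        System.out.println(excess.keySet()); // [storage-2]
    }
}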

From source file:com.mobileglobe.android.customdialer.common.model.AccountTypeManager.java

/**
 * Return all usable {@link AccountType}s that support the "invite" feature from the
 * list of all potential invitable account types (retrieved from
 * {@link #getAllInvitableAccountTypes}). A usable invitable account type means:
 * (1) there is at least 1 raw contact in the database with that account type, and
 * (2) the app contributing the account type is not disabled.
 *
 * Warning: Don't use on the UI thread because this can scan the database.
 */
private Map<AccountTypeWithDataSet, AccountType> findUsableInvitableAccountTypes(Context context) {
    Map<AccountTypeWithDataSet, AccountType> allInvitables = getAllInvitableAccountTypes();
    if (allInvitables.isEmpty()) {
        return EMPTY_UNMODIFIABLE_ACCOUNT_TYPE_MAP;
    }

    final HashMap<AccountTypeWithDataSet, AccountType> result = Maps.newHashMap();
    result.putAll(allInvitables);

    final PackageManager packageManager = context.getPackageManager();
    for (AccountTypeWithDataSet accountTypeWithDataSet : allInvitables.keySet()) {
        AccountType accountType = allInvitables.get(accountTypeWithDataSet);

        // Make sure that account types don't come from apps that are disabled.
        Intent invitableIntent = MoreContactUtils.getInvitableIntent(accountType, SAMPLE_CONTACT_URI);
        if (invitableIntent == null) {
            result.remove(accountTypeWithDataSet);
            continue;
        }
        ResolveInfo resolveInfo = packageManager.resolveActivity(invitableIntent,
                PackageManager.MATCH_DEFAULT_ONLY);
        if (resolveInfo == null) {
            // If we can't find an activity to start for this intent, then there's no point in
            // showing this option to the user.
            result.remove(accountTypeWithDataSet);
            continue;
        }

        // Make sure that there is at least 1 raw contact with this account type. This check
        // is non-trivial and should not be done on the UI thread.
        if (!accountTypeWithDataSet.hasData(context)) {
            result.remove(accountTypeWithDataSet);
        }
    }

    return Collections.unmodifiableMap(result);
}

From source file:org.restcomm.android.olympus.SettingsActivity.java

@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // Handle action bar item clicks here. The action bar will
    // automatically handle clicks on the Home/Up button, so long
    // as you specify a parent activity in AndroidManifest.xml.
    int id = item.getItemId();
    if (id == android.R.id.home) {
        try {
            HashMap<String, Object> prefHashMap = (HashMap<String, Object>) prefs.getAll();
            if (updated || pushUpdated) {
                // There is a slight difference between the data structure of SharedPreferences and
                // the one that the SDK understands. In SharedPreferences the value for the
                // MEDIA_ICE_SERVERS_DISCOVERY_TYPE key is a String, while the SDK wants a
                // MediaIceServersDiscoveryType enum, so we need to convert between the two.
                // In this case we remove the one and introduce the other.
                String iceServersDiscoveryType = "0";
                if (prefHashMap.containsKey(RCDevice.ParameterKeys.MEDIA_ICE_SERVERS_DISCOVERY_TYPE)) {
                    iceServersDiscoveryType = (String) prefHashMap
                            .get(RCDevice.ParameterKeys.MEDIA_ICE_SERVERS_DISCOVERY_TYPE);
                    prefHashMap.remove(RCDevice.ParameterKeys.MEDIA_ICE_SERVERS_DISCOVERY_TYPE);
                }
                prefHashMap.put(RCDevice.ParameterKeys.MEDIA_ICE_SERVERS_DISCOVERY_TYPE,
                        RCDevice.MediaIceServersDiscoveryType.values()[Integer
                                .parseInt(iceServersDiscoveryType)]);

                // Same for candidate timeout
                String candidateTimeout = "0";
                if (prefHashMap.containsKey(RCConnection.ParameterKeys.DEBUG_CONNECTION_CANDIDATE_TIMEOUT)) {
                    candidateTimeout = (String) prefHashMap
                            .get(RCConnection.ParameterKeys.DEBUG_CONNECTION_CANDIDATE_TIMEOUT);
                    prefHashMap.remove(RCConnection.ParameterKeys.DEBUG_CONNECTION_CANDIDATE_TIMEOUT);
                }
                prefHashMap.put(RCConnection.ParameterKeys.DEBUG_CONNECTION_CANDIDATE_TIMEOUT,
                        Integer.parseInt(candidateTimeout));

                if (device.isInitialized()) {
                    device.reconfigure(params);
                } else {
                    //try to initialize with params
                    device.initialize(this, params, this);
                }

            }
            NavUtils.navigateUpFromSameTask(this);
        } catch (RCException e) {
            showOkAlert("Error saving Settings", e.errorText);
        }

        return true;
    }
    return super.onOptionsItemSelected(item);
}

From source file:playground.pieter.events.MultiModalFlowAndDensityCollector.java

public HashMap<Id, int[]> calculateOccupancy(HashMap<Id<Link>, int[]> deltaFlow,
        Map<Id<Link>, ? extends Link> links) {
    // send actual link info.
    HashMap<Id, int[]> occupancy = new HashMap<>();

    for (Id linkId : deltaFlow.keySet()) {
        occupancy.put(linkId, null);
    }

    for (Id linkId : occupancy.keySet()) {

        int[] deltaflowBins = deltaFlow.get(linkId); // give labels to deltaflowBins
        int[] occupancyBins = new int[deltaflowBins.length];
        Link link = links.get(linkId);
        occupancyBins[0] = deltaflowBins[0];
        for (int i = 1; i < deltaflowBins.length; i++) {
            occupancyBins[i] = (occupancyBins[i - 1] + deltaflowBins[i]);
        }

        occupancy.put(linkId, occupancyBins);
        deltaFlow.remove(linkId);
    }

    return occupancy;
}
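
Note that calculateOccupancy can call deltaFlow.remove(linkId) inside the loop only because the loop iterates over a different map (occupancy). Removing entries from the map you are currently iterating typically fails fast with a ConcurrentModificationException; the safe pattern in that case is Iterator.remove(). A hypothetical sketch with invented keys and values:

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

public class RemoveWhileIteratingSketch {
    public static void main(String[] args) {
        Map<String, int[]> deltaFlow = new HashMap<String, int[]>();
        deltaFlow.put("link-1", new int[] { 1, 2, 3 });
        deltaFlow.put("link-2", new int[0]);

        // calling deltaFlow.remove(key) inside this loop would typically throw
        // ConcurrentModificationException; Iterator.remove() is the safe way
        Iterator<Map.Entry<String, int[]>> it = deltaFlow.entrySet().iterator();
        while (it.hasNext()) {
            if (it.next().getValue().length == 0) {
                it.remove();
            }
        }

        System.out.println(deltaFlow.keySet()); // [link-1]
    }
}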

From source file:org.apache.hadoop.chukwa.analysis.salsa.visualization.Swimlanes.java

public Table getData() {
    // preliminary setup
    OfflineTimeHandler time_offline;
    TimeHandler time_online;
    long start, end;

    if (offline_use) {
        time_offline = new OfflineTimeHandler(param_map, this.timezone);
        start = time_offline.getStartTime();
        end = time_offline.getEndTime();
    } else {
        time_online = new TimeHandler(this.request, this.timezone);
        start = time_online.getStartTime();
        end = time_online.getEndTime();
    }

    DatabaseWriter dbw = new DatabaseWriter(this.cluster);
    String query;

    // setup query
    if (this.shuffle_option != null && this.shuffle_option.equals("shuffles")) {
        query = "select job_id,friendly_id,start_time,finish_time,start_time_millis,finish_time_millis,status,state_name,hostname from ["
                + this.table + "] where finish_time between '[start]' and '[end]'";
    } else {
        query = "select job_id,friendly_id,start_time,finish_time,start_time_millis,finish_time_millis,status,state_name,hostname from ["
                + this.table
                + "] where finish_time between '[start]' and '[end]' and not state_name like 'shuffle_local' and not state_name like 'shuffle_remote'";
    }
    if (this.jobname != null) {
        query = query + " and job_id like '" + this.jobname + "'";
    }
    Macro mp = new Macro(start, end, query);
    query = mp.toString() + " order by start_time";

    Table rs_tab = null;
    DatabaseDataSource dds;
    DefaultSQLDataHandler dh = new DefaultSQLDataHandler();

    log.debug("Query: " + query);
    // execute query
    try {
        dds = ConnectionFactory.getDatabaseConnection(dbw.getConnection());
        rs_tab = dds.getData(query);
    } catch (prefuse.data.io.DataIOException e) {
        System.err.println("prefuse data IO error: " + e);
        log.warn("prefuse data IO error: " + e);
        return null;
    } catch (SQLException e) {
        System.err.println("Error in SQL: " + e + " in statement: " + query);
        log.warn("Error in SQL: " + e + " in statement: " + query);
        return null;
    }

    HashMap<String, Integer> state_counts = new HashMap<String, Integer>();
    HashSet<String> states = new HashSet<String>();
    for (int i = 0; i < rs_tab.getRowCount(); i++) {
        String curr_state = rs_tab.getString(i, "state_name");
        states.add(curr_state);
        Integer cnt = state_counts.get(curr_state);
        if (cnt == null) {
            state_counts.put(curr_state, new Integer(1));
        } else {
            state_counts.remove(curr_state);
            state_counts.put(curr_state, new Integer(cnt.intValue() + 1));
        }
    }

    log.info("Search complete: #cols: " + rs_tab.getColumnCount() + "; #rows: " + rs_tab.getRowCount());

    return rs_tab;
}
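
One small observation on the counting loop above: the remove before put is not strictly needed, since put already replaces an existing mapping, and on Java 8 and later merge collapses the null check and the increment into a single call. A sketch of that alternative, with invented state names:

import java.util.HashMap;

public class StateCountsSketch {
    public static void main(String[] args) {
        HashMap<String, Integer> stateCounts = new HashMap<String, Integer>();

        for (String state : new String[] { "map", "shuffle", "map" }) {
            // inserts 1 for a new key, otherwise applies Integer::sum to the old count
            stateCounts.merge(state, 1, Integer::sum);
        }

        System.out.println(stateCounts); // e.g. {shuffle=1, map=2}
    }
}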

From source file:edu.arizona.kra.kim.impl.identity.PersonServiceImpl.java

@Override
public Map<String, String> convertPersonPropertiesToEntityProperties(Map<String, String> criteria) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("convertPersonPropertiesToEntityProperties: " + criteria);
    }
    boolean nameCriteria = false;
    boolean addressCriteria = false;
    boolean affiliationCriteria = false;
    boolean affiliationDefaultOnlyCriteria = false;
    boolean phoneCriteria = false;
    boolean emailCriteria = false;
    boolean employeeIdCriteria = false;
    // add base lookups for all person lookups
    HashMap<String, String> newCriteria = new HashMap<String, String>();
    newCriteria.putAll(baseLookupCriteria);

    newCriteria.put("entityTypeContactInfos.entityTypeCode", personEntityTypeLookupCriteria);

    if (criteria != null) {
        for (String key : criteria.keySet()) {
            //check active radio button
            if (key.equals(KIMPropertyConstants.Person.ACTIVE)) {
                newCriteria.put(criteriaConversion.get(KIMPropertyConstants.Person.ACTIVE),
                        criteria.get(KIMPropertyConstants.Person.ACTIVE));
            } else {
                // The following if statement enables the "both" button to work correctly.
                if (!(criteria.containsKey(KIMPropertyConstants.Person.ACTIVE))) {
                    newCriteria.remove(KIMPropertyConstants.Person.ACTIVE);
                }
            }

            // if no value was passed, skip the entry in the Map
            if (StringUtils.isEmpty(criteria.get(key))) {
                continue;
            }
            // check if the value needs to be encrypted
            // handle encrypted external identifiers
            if (key.equals(KIMPropertyConstants.Person.EXTERNAL_ID)
                    && StringUtils.isNotBlank(criteria.get(key))) {
                // look for a ext ID type property
                if (criteria.containsKey(KIMPropertyConstants.Person.EXTERNAL_IDENTIFIER_TYPE_CODE)) {
                    String extIdTypeCode = criteria
                            .get(KIMPropertyConstants.Person.EXTERNAL_IDENTIFIER_TYPE_CODE);
                    if (StringUtils.isNotBlank(extIdTypeCode)) {
                        // if found, load that external ID Type via service
                        EntityExternalIdentifierType extIdType = getIdentityService()
                                .getExternalIdentifierType(extIdTypeCode);
                        // if that type needs to be encrypted, encrypt the value in the criteria map
                        if (extIdType != null && extIdType.isEncryptionRequired()) {
                            try {
                                if (CoreApiServiceLocator.getEncryptionService().isEnabled()) {
                                    criteria.put(key, CoreApiServiceLocator.getEncryptionService()
                                            .encrypt(criteria.get(key)));
                                }
                            } catch (GeneralSecurityException ex) {
                                LOG.error("Unable to encrypt value for external ID search of type "
                                        + extIdTypeCode, ex);
                            }
                        }
                    }
                }
            }

            // convert the property to the Entity data model
            String entityProperty = criteriaConversion.get(key);
            if (entityProperty != null) {
                newCriteria.put(entityProperty, criteria.get(key));
            } else {
                entityProperty = key;
                // just pass it through if no translation present
                newCriteria.put(key, criteria.get(key));
            }
            // check if additional criteria are needed based on the types of properties specified
            if (isNameEntityCriteria(entityProperty)) {
                nameCriteria = true;
            }
            if (isAffiliationEntityCriteria(entityProperty)) {
                affiliationCriteria = true;
            }
            if (isAddressEntityCriteria(entityProperty)) {
                addressCriteria = true;
            }
            if (isPhoneEntityCriteria(entityProperty)) {
                phoneCriteria = true;
            }
            if (isEmailEntityCriteria(entityProperty)) {
                emailCriteria = true;
            }
            if (isEmployeeIdEntityCriteria(entityProperty)) {
                employeeIdCriteria = true;
            }
            // special handling for the campus code, since that forces the query to look
            // at the default affiliation record only
            if (key.equals("campusCode")) {
                affiliationDefaultOnlyCriteria = true;
            }
        }

        if (nameCriteria) {
            newCriteria.put(ENTITY_NAME_PROPERTY_PREFIX + "active", "Y");
            newCriteria.put(ENTITY_NAME_PROPERTY_PREFIX + "defaultValue", "Y");
            //newCriteria.put(ENTITY_NAME_PROPERTY_PREFIX + "nameCode", "PRFR");//so we only display 1 result
        }
        if (addressCriteria) {
            newCriteria.put(ENTITY_ADDRESS_PROPERTY_PREFIX + "active", "Y");
            newCriteria.put(ENTITY_ADDRESS_PROPERTY_PREFIX + "defaultValue", "Y");
        }
        if (phoneCriteria) {
            newCriteria.put(ENTITY_PHONE_PROPERTY_PREFIX + "active", "Y");
            newCriteria.put(ENTITY_PHONE_PROPERTY_PREFIX + "defaultValue", "Y");
        }
        if (emailCriteria) {
            newCriteria.put(ENTITY_EMAIL_PROPERTY_PREFIX + "active", "Y");
            newCriteria.put(ENTITY_EMAIL_PROPERTY_PREFIX + "defaultValue", "Y");
        }
        if (employeeIdCriteria) {
            newCriteria.put(ENTITY_EMPLOYEE_ID_PROPERTY_PREFIX + "active", "Y");
            newCriteria.put(ENTITY_EMPLOYEE_ID_PROPERTY_PREFIX + "primary", "Y");
        }
        if (affiliationCriteria) {
            newCriteria.put(ENTITY_AFFILIATION_PROPERTY_PREFIX + "active", "Y");
        }
        if (affiliationDefaultOnlyCriteria) {
            newCriteria.put(ENTITY_AFFILIATION_PROPERTY_PREFIX + "defaultValue", "Y");
        }
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("Converted: " + newCriteria);
    }
    return newCriteria;
}