Example usage for java.util LinkedHashMap isEmpty

Introduction

On this page you can find example usage for java.util LinkedHashMap.isEmpty().

Prototype

boolean isEmpty();

Document

Returns true if this map contains no key-value mappings.
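
As a quick, self-contained illustration of this contract (class and variable names here are ours, not taken from the projects below):

import java.util.LinkedHashMap;

public class IsEmptyDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> map = new LinkedHashMap<String, Integer>();
        System.out.println(map.isEmpty()); // true: no mappings yet

        map.put("one", 1);
        System.out.println(map.isEmpty()); // false: one mapping present

        map.clear();
        System.out.println(map.isEmpty()); // true again: clear() removed all mappings
    }
}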

Usage

From source file: org.zaizi.sensefy.api.utils.SensefyUserMapper.java

@SuppressWarnings("unchecked")
public static SensefyUser getSensefyUserFromPrincipal(Principal user) {

    SensefyUser sensefyUser = new SensefyUser();

    if (user != null) {
        OAuth2Authentication authUser = (OAuth2Authentication) user;
        if (authUser != null) {

            LinkedHashMap<String, Object> details = (LinkedHashMap<String, Object>) authUser
                    .getUserAuthentication().getDetails();

            if (details != null && !details.isEmpty() && details.containsKey("principal")) {
                LinkedHashMap<String, Object> principal = (LinkedHashMap<String, Object>) details
                        .get("principal");

                if (principal != null && !principal.isEmpty()) {
                    try {
                        BeanUtils.populate(sensefyUser, principal);
                    } catch (IllegalAccessException e) {
                        logger.debug(e.getMessage());
                    } catch (InvocationTargetException e) {
                        logger.debug(e.getMessage());
                    }
                }

            }

        }

    }
    return sensefyUser;
}
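
Note the defensive pattern: the code verifies both !details.isEmpty() and containsKey("principal") before retrieving the nested map, and applies the same null-or-empty check to that nested map before populating the bean.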

From source file: com.qwarz.graph.process.GraphProcess.java

public static void createUpdateNodes(GraphBase base, LinkedHashMap<String, GraphNode> nodes, Boolean upsert)
        throws IOException, URISyntaxException, ServerException {
    if (nodes == null || nodes.isEmpty())
        return;
    GraphProcessInterface graphImpl = getImplementation(base.data);
    if (upsert != null && upsert) {
        // If the nodes already exists, we merge them
        Map<String, GraphNode> dbNodes = graphImpl.getNodes(base, nodes.keySet());
        if (dbNodes != null) {
            for (Map.Entry<String, GraphNode> entry : nodes.entrySet()) {
                GraphNode dbNode = dbNodes.get(entry.getKey().intern());
                if (dbNode != null)
                    entry.getValue().add(dbNode);
            }
        }
    }
    graphImpl.createUpdateNodes(base, nodes);
}
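
The opening guard, if (nodes == null || nodes.isEmpty()) return;, is a common idiom: null and empty inputs are treated alike and short-circuit the method before any database or network work is done.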

From source file: com.google.gwt.emultest.java.util.LinkedHashMapTest.java

/**
 * Check the state of a newly constructed, empty LinkedHashMap.
 *
 * @param hashMap the newly constructed map to check
 */
private static void checkEmptyLinkedHashMapAssumptions(LinkedHashMap<?, ?> hashMap) {
    assertNotNull(hashMap);
    assertTrue(hashMap.isEmpty());

    assertNotNull(hashMap.values());
    assertTrue(hashMap.values().isEmpty());
    assertTrue(hashMap.values().size() == 0);

    assertNotNull(hashMap.keySet());
    assertTrue(hashMap.keySet().isEmpty());
    assertTrue(hashMap.keySet().size() == 0);

    assertNotNull(hashMap.entrySet());
    assertTrue(hashMap.entrySet().isEmpty());
    assertTrue(hashMap.entrySet().size() == 0);

    assertNotNull(hashMap.entrySet().iterator());
    assertFalse(hashMap.entrySet().iterator().hasNext());
}
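
This test also documents a useful invariant: whenever isEmpty() returns true, the values(), keySet(), and entrySet() views must all be empty as well, and the entry-set iterator must report no next element.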

From source file: com.twitter.ambrose.hive.HiveDAGTransformer.java

/**
 * Gets all job aliases.
 *
 * @param pathToAliases mapping from input paths to the aliases defined for each path
 * @return all aliases across all paths, or an empty list if there are none
 */
private List<String> getAllJobAliases(LinkedHashMap<String, ArrayList<String>> pathToAliases) {
    if (pathToAliases == null || pathToAliases.isEmpty()) {
        return Collections.emptyList();
    }
    List<String> result = new ArrayList<String>();
    for (List<String> aliases : pathToAliases.values()) {
        if (aliases != null && !aliases.isEmpty()) {
            result.addAll(aliases);
        }
    }
    return result;
}
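
Returning Collections.emptyList() for a null or empty input spares callers a null check of their own; the isEmpty() test simply avoids allocating a result list when there is nothing to collect.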

From source file: com.streamsets.pipeline.stage.processor.jdbcmetadata.JdbcMetadataProcessor.java

@Override
protected void process(Record record, BatchMaker batchMaker) throws StageException {
    try {
        ELVars variables = getContext().createELVars();
        RecordEL.setRecordInContext(variables, record);
        TimeEL.setCalendarInContext(variables, Calendar.getInstance());
        TimeNowEL.setTimeNowInContext(variables, new Date());

        String schema = (schemaEL != null) ? elEvals.dbNameELEval.eval(variables, schemaEL, String.class)
                : null;
        String tableName = elEvals.tableNameELEval.eval(variables, tableNameEL, String.class);

        if (StringUtils.isEmpty(schema)) {
            schema = null;
        }

        // Obtain the record structure from current record
        LinkedHashMap<String, JdbcTypeInfo> recordStructure = JdbcMetastoreUtil.convertRecordToJdbcType(record,
                decimalDefaultsConfig.precisionAttribute, decimalDefaultsConfig.scaleAttribute, schemaWriter);

        if (recordStructure.isEmpty()) {
            batchMaker.addRecord(record);
            return;
        }

        LinkedHashMap<String, JdbcTypeInfo> tableStructure = null;
        try {
            tableStructure = tableCache.get(Pair.of(schema, tableName));
        } catch (ExecutionException e) {
            throw new JdbcStageCheckedException(JdbcErrors.JDBC_203, e.getMessage(), e);
        }

        if (tableStructure.isEmpty()) {
            // Create table
            schemaWriter.createTable(schema, tableName, recordStructure);
            tableCache.put(Pair.of(schema, tableName), recordStructure);
        } else {
            // Compare tables
            LinkedHashMap<String, JdbcTypeInfo> columnDiff = JdbcMetastoreUtil.getDiff(tableStructure,
                    recordStructure);
            if (!columnDiff.isEmpty()) {
                LOG.trace("Detected drift for table {} - new columns: {}", tableName,
                        StringUtils.join(columnDiff.keySet(), ","));
                schemaWriter.alterTable(schema, tableName, columnDiff);
                tableCache.put(Pair.of(schema, tableName), recordStructure);
            }
        }

        batchMaker.addRecord(record);
    } catch (JdbcStageCheckedException error) {
        LOG.error("Error happened when processing record", error);
        LOG.trace("Record that caused the error: {}", record.toString());
        errorRecordHandler.onError(new OnRecordErrorException(record, error.getErrorCode(), error.getParams()));
    }
}
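
Here isEmpty() carries control-flow weight three times: an empty record structure means there is nothing to reconcile, an empty cached table structure means the table does not exist yet and must be created, and an empty column diff means no ALTER TABLE is needed.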

From source file: com.fortify.processrunner.common.processor.AbstractProcessorUpdateIssueStateForVulnerabilities.java

/**
 * Process the current group of vulnerabilities (grouped by bug tracker deep link) to update the corresponding
 * previously submitted issue. This includes updating issue fields, re-opening issues if they have been closed
 * but there are open vulnerabilities, and closing issues if they are open but no open vulnerabilities are remaining.
 */
@Override
protected boolean processMap(Context context, List<Object> vulnerabilities, LinkedHashMap<String, Object> map) {
    SubmittedIssue submittedIssue = getSubmittedIssue(vulnerabilities.get(0));
    if (map != null && !map.isEmpty() && updateIssueFields(context, submittedIssue, map)) {
        // Read the field names only after the null/empty check to avoid a NullPointerException.
        String fieldNames = map.keySet().toString();
        LOG.info(String.format("[%s] Updated field(s) %s for issue %s", getBugTrackerName(), fieldNames,
                submittedIssue.getDeepLink()));
    }
    if (hasOpenVulnerabilities(vulnerabilities)) {
        if (openIssueIfClosed(context, submittedIssue)) {
            LOG.info(String.format("[%s] Re-opened issue %s", getBugTrackerName(),
                    submittedIssue.getDeepLink()));
        }
    } else {
        if (closeIssueIfOpen(context, submittedIssue)) {
            LOG.info(String.format("[%s] Closed issue %s", getBugTrackerName(), submittedIssue.getDeepLink()));
        }
    }
    if (vulnerabilityUpdater != null) {
        vulnerabilityUpdater.updateVulnerabilityStateForExistingIssue(context, getBugTrackerName(),
                submittedIssue, getIssueStateDetailsRetriever(), vulnerabilities);
    }

    return true;
}
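
Checking map != null && !map.isEmpty() before calling updateIssueFields() ensures the tracked issue is only touched, and the update only logged, when there are actually fields to write.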

From source file: org.jamwiki.servlets.CategoryServlet.java

/**
 * Builds the model for the page that lists all categories in the current virtual wiki.
 */
private void viewCategories(HttpServletRequest request, ModelAndView next, WikiPageInfo pageInfo)
        throws Exception {
    String virtualWiki = pageInfo.getVirtualWikiName();
    Pagination pagination = ServletUtil.loadPagination(request, next);
    List<Category> categoryObjects = WikiBase.getDataHandler().getAllCategories(virtualWiki, pagination);
    LinkedHashMap<String, String> categories = new LinkedHashMap<String, String>();
    for (Category category : categoryObjects) {
        String key = category.getName();
        String value = key.substring(Namespace.namespace(Namespace.CATEGORY_ID).getLabel(virtualWiki).length()
                + Namespace.SEPARATOR.length());
        categories.put(key, value);
    }
    next.addObject("categoryCount", categories.size());
    next.addObject("categories", categories);
    if (categories.isEmpty()) {
        pageInfo.addMessage(new WikiMessage("allcategories.message.none"));
    }
    pageInfo.setPageTitle(new WikiMessage("allcategories.title"));
    pageInfo.setContentJsp(JSP_CATEGORIES);
    pageInfo.setSpecial(true);
}
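
Here isEmpty() feeds the user interface directly: an empty category map triggers the "allcategories.message.none" message instead of rendering an empty listing.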

From source file: com.espertech.esper.filter.FilterSpecCompiler.java

private static FilterSpecParam handleProperty(FilterOperator op, ExprIdentNode identNodeLeft,
        ExprIdentNode identNodeRight, LinkedHashMap<String, Pair<EventType, String>> arrayEventTypes,
        String statementName) throws ExprValidationException {
    String propertyName = identNodeLeft.getResolvedPropertyName();

    Class leftType = identNodeLeft.getExprEvaluator().getType();
    Class rightType = identNodeRight.getExprEvaluator().getType();

    SimpleNumberCoercer numberCoercer = getNumberCoercer(leftType, rightType, propertyName);
    boolean isMustCoerce = numberCoercer != null;
    Class numericCoercionType = JavaClassHelper.getBoxedType(leftType);

    String streamName = identNodeRight.getResolvedStreamName();
    if (arrayEventTypes != null && !arrayEventTypes.isEmpty() && arrayEventTypes.containsKey(streamName)) {
        Pair<Integer, String> indexAndProp = getStreamIndex(identNodeRight.getResolvedPropertyName());
        return new FilterSpecParamEventPropIndexed(identNodeLeft.getFilterLookupable(), op,
                identNodeRight.getResolvedStreamName(), indexAndProp.getFirst(), indexAndProp.getSecond(),
                isMustCoerce, numberCoercer, numericCoercionType, statementName);
    }
    return new FilterSpecParamEventProp(identNodeLeft.getFilterLookupable(), op,
            identNodeRight.getResolvedStreamName(), identNodeRight.getResolvedPropertyName(), isMustCoerce,
            numberCoercer, numericCoercionType, statementName);
}
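
Strictly speaking, containsKey() already returns false for an empty map, so the !arrayEventTypes.isEmpty() test in the guard is redundant; it does, however, make the intent of the condition explicit.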

From source file: eionet.cr.dao.virtuoso.VirtuosoEndpointHarvestQueryDAO.java

@Override
public void move(String endpointUrl, Set<Integer> ids, int direction) throws DAOException {

    if (StringUtils.isBlank(endpointUrl) || ids == null || ids.isEmpty()) {
        return;
    }

    if (direction == 0) {
        throw new IllegalArgumentException("Direction must not be 0!");
    }

    // Prepare map where we can get queries by position, also find the max and min positions.
    LinkedHashMap<Integer, EndpointHarvestQueryDTO> queriesByPos = getQueriesByPosition(endpointUrl);
    if (queriesByPos.isEmpty()) {
        return;
    }
    Set<Integer> positions = queriesByPos.keySet();
    int maxPos = Collections.max(positions);
    int minPos = Collections.min(positions);

    Connection conn = null;
    try {
        conn = getSQLConnection();
        conn.setAutoCommit(false);

        // If even one query is already at position 1 then moving up is not considered possible.
        // And conversely, if even one query is already at the last position, then moving down
        // is not considered possible either.

        boolean isMovingPossible = true;
        List<Integer> selectedPositions = new ArrayList<Integer>();
        List<EndpointHarvestQueryDTO> queries = new ArrayList<EndpointHarvestQueryDTO>(queriesByPos.values());
        for (EndpointHarvestQueryDTO query : queries) {

            if (ids.contains(query.getId())) {

                int pos = query.getPosition();
                if ((direction < 0 && pos == minPos) || (direction > 0 && pos == maxPos)) {
                    isMovingPossible = false;
                } else {
                    selectedPositions.add(pos);
                }
            }
        }

        if (isMovingPossible) {

            if (direction < 0) {
                for (Integer selectedPosition : selectedPositions) {

                    EndpointHarvestQueryDTO queryToMove = queriesByPos.get(selectedPosition);
                    int i = queries.indexOf(queryToMove);
                    queries.set(i, queries.get(i - 1));
                    queries.set(i - 1, queryToMove);
                }
            } else {
                for (int j = selectedPositions.size() - 1; j >= 0; j--) {

                    EndpointHarvestQueryDTO queryToMove = queriesByPos.get(selectedPositions.get(j));
                    int i = queries.indexOf(queryToMove);
                    queries.set(i, queries.get(i + 1));
                    queries.set(i + 1, queryToMove);
                }
            }
        }

        SQLUtil.executeUpdate(INCREASE_POSITIONS_SQL, Arrays.asList(maxPos, endpointUrl), conn);
        for (int i = 0; i < queries.size(); i++) {
            SQLUtil.executeUpdate(UPDATE_POSITION_SQL, Arrays.asList(i + 1, queries.get(i).getId()), conn);
        }
        conn.commit();

    } catch (Exception e) {
        SQLUtil.rollback(conn);
        throw new DAOException(e.getMessage(), e);
    } finally {
        SQLUtil.close(conn);
    }
}
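
The early return on queriesByPos.isEmpty() matters here: it avoids opening a database connection and starting a transaction when there are no queries to reorder.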

From source file: net.sf.maltcms.chromaui.normalization.spi.charts.PeakGroupRtBoxPlot.java

protected String getPeakName(IPeakGroupDescriptor pgd) {
    String rt = "mean rt: " + String.format("%.2f", pgd.getMeanApexTime()) + "+/-"
            + String.format("%.2f", pgd.getApexTimeStdDev()) + "; median rt: "
            + String.format("%.2f", pgd.getMedianApexTime()) + ": ";
    LinkedHashMap<String, Integer> names = new LinkedHashMap<>();
    if (!pgd.getDisplayName().equals(pgd.getName())) {
        return rt + pgd.getDisplayName();
    }
    for (IPeakAnnotationDescriptor ipad : pgd.getPeakAnnotationDescriptors()) {
        if (names.containsKey(ipad.getName())) {
            names.put(ipad.getName(), names.get(ipad.getName()) + 1);
        } else {
            names.put(ipad.getName(), 1);
        }
    }
    if (names.isEmpty()) {
        return rt + "<NA>";
    }
    if (names.size() > 1) {
        StringBuilder sb = new StringBuilder();
        for (String key : names.keySet()) {
            sb.append(key);
            sb.append(" (" + names.get(key) + ")");
            sb.append(" | ");
        }
        // Strip the trailing " | " separator appended after the last name.
        sb.setLength(sb.length() - 3);
        return rt + sb.toString();
    } else {
        return rt + names.keySet().toArray(new String[0])[0];
    }
}
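
The empty check has a clear fallback: if no peak annotation names were collected, the method returns "<NA>" rather than an empty label. Because names is a LinkedHashMap, the multi-name summary lists names in the order they were first encountered.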