Example usage for java.util.HashMap isEmpty()

List of usage examples for java.util.HashMap.isEmpty()

Introduction

This page collects usage examples of java.util.HashMap.isEmpty() from open-source projects.

Prototype

public boolean isEmpty() 

Document

Returns true if this map contains no key-value mappings.
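
As a quick illustration of this contract, the following minimal sketch (class and key names are illustrative, not taken from the projects below) shows the typical true/false transitions:

import java.util.HashMap;

public class IsEmptyDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<>();
        System.out.println(counts.isEmpty()); // true: no key-value mappings yet

        counts.put("widgets", 3);
        System.out.println(counts.isEmpty()); // false: one mapping is present

        counts.clear();
        System.out.println(counts.isEmpty()); // true again once all mappings are removed
    }
}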

Usage

From source file: org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils.java

/**
 * Set the key and value description for all the tasks rooted at the given
 * task. Loops over all the tasks recursively.
 *
 * @param task the root task of the tree to process
 */
public static void setKeyAndValueDescForTaskTree(Task<? extends Serializable> task) {

    if (task instanceof ConditionalTask) {
        List<Task<? extends Serializable>> listTasks = ((ConditionalTask) task).getListTasks();
        for (Task<? extends Serializable> tsk : listTasks) {
            setKeyAndValueDescForTaskTree(tsk);
        }
    } else if (task instanceof ExecDriver) {
        MapredWork work = (MapredWork) task.getWork();
        work.getMapWork().deriveExplainAttributes();
        HashMap<String, Operator<? extends OperatorDesc>> opMap = work.getMapWork().getAliasToWork();
        if (opMap != null && !opMap.isEmpty()) {
            for (Operator<? extends OperatorDesc> op : opMap.values()) {
                setKeyAndValueDesc(work.getReduceWork(), op);
            }
        }
    } else if (task != null && (task.getWork() instanceof TezWork)) {
        TezWork work = (TezWork) task.getWork();
        for (BaseWork w : work.getAllWorkUnsorted()) {
            if (w instanceof MapWork) {
                ((MapWork) w).deriveExplainAttributes();
            }
        }
    } else if (task instanceof SparkTask) {
        SparkWork work = (SparkWork) task.getWork();
        for (BaseWork w : work.getAllWorkUnsorted()) {
            if (w instanceof MapWork) {
                ((MapWork) w).deriveExplainAttributes();
            }
        }
    }

    if (task.getChildTasks() == null) {
        return;
    }

    for (Task<? extends Serializable> childTask : task.getChildTasks()) {
        setKeyAndValueDescForTaskTree(childTask);
    }
}

From source file: org.apache.pig.impl.util.JarManager.java

private static void createPigScriptUDFJar(OutputStream os, PigContext pigContext,
        HashMap<String, String> contents) throws IOException {
    JarOutputStream jarOutputStream = new JarOutputStream(os);
    for (String path : pigContext.scriptFiles) {
        log.debug("Adding entry " + path + " to job jar");
        InputStream stream = null;
        File inputFile = new File(path);
        if (inputFile.exists()) {
            stream = new FileInputStream(inputFile);
        } else {
            stream = PigContext.getClassLoader().getResourceAsStream(path);
        }
        if (stream == null) {
            throw new IOException("Cannot find " + path);
        }
        try {
            addStream(jarOutputStream, path, stream, contents, inputFile.lastModified());
        } finally {
            stream.close();
        }
    }
    for (Map.Entry<String, File> entry : pigContext.getScriptFiles().entrySet()) {
        log.debug("Adding entry " + entry.getKey() + " to job jar");
        InputStream stream = null;
        if (entry.getValue().exists()) {
            stream = new FileInputStream(entry.getValue());
        } else {
            stream = PigContext.getClassLoader().getResourceAsStream(entry.getValue().getPath());
        }
        if (stream == null) {
            throw new IOException("Cannot find " + entry.getValue().getPath());
        }
        try {
            addStream(jarOutputStream, entry.getKey(), stream, contents, entry.getValue().lastModified());
        } finally {
            stream.close();
        }
    }
    if (!contents.isEmpty()) {
        jarOutputStream.close();
    } else {
        os.close();
    }
}

From source file: pathwaynet.PathwayCalculator.java

private <E> HashMap<E, TestResultForEachVertex> testForEachComponent(Graph<E, String> graph,
        Collection<E> componentsInGroup, Collection<E> componentsConsidered, boolean onlyFromSource) {
    HashMap<E, TestResultForEachVertex> significance = new HashMap<>();

    // calculate and cache all distances
    DijkstraDistance<E, String> distances = new DijkstraDistance<>(graph);
    HashMap<E, Map<E, Number>> distancesMap = new HashMap<>();
    graph.getVertices().stream().forEach((component) -> {
        Map<E, Number> distancesFromThis = distances.getDistanceMap(component);
        distancesMap.put(component, distancesFromThis);
    });

    // calculate real in-group and out-group distances
    HashMap<E, Map<E, Number>> distancesInsideGroup = getDistancesWithGroup(distancesMap, componentsInGroup,
            componentsConsidered, onlyFromSource, true);
    HashMap<E, Map<E, Number>> distancesOutsideGroup = getDistancesWithGroup(distancesMap, componentsInGroup,
            componentsConsidered, onlyFromSource, false);

    if (distancesInsideGroup.isEmpty() || distancesOutsideGroup.isEmpty()) {
        System.err.println("WARNING: Please double check the enzyme list!");
    } else {
        HashMap<E, ArrayList<Double>> differencesProp = new HashMap<>();
        distancesInsideGroup.keySet().stream().forEach((component) -> {
            ArrayList<Double> diffIncreaseProp = estimateDifferenceOfProportionAtDistances(
                    distancesInsideGroup.get(component).values(),
                    distancesOutsideGroup.get(component).values());
            differencesProp.put(component, diffIncreaseProp);
            //System.err.println(enzyme.getID()+"\t"+diffIncreaseProp);
        });

        // for each enzyme in the given group, estimate its significance of neighbor enrichment of enzymes in the group
        //System.err.println();
        distancesInsideGroup.keySet().stream().forEach((component) -> {
            // do permutation (for numPermutations times) to generate random group with the same size and with this enzyme
            HashSet<E> allComponentsAvailable = new HashSet<>();
            allComponentsAvailable.addAll(graph.getVertices());
            allComponentsAvailable.retainAll(componentsConsidered);
            ArrayList<HashSet<E>> componentsInGroupPermutations = generatePermutatedGroupsWithFixedNode(
                    component, allComponentsAvailable, distancesInsideGroup.size());

            // for each permutation, calculate the differences of proportion between within-group and between-group path at each path length
            ArrayList<ArrayList<Double>> differencesPropPermutations = new ArrayList<>();
            componentsInGroupPermutations.stream().forEach((componentsInGroupThisPermutation) -> {
                HashSet<E> componentsOutGroupThisPermutation = new HashSet<>();
                componentsOutGroupThisPermutation.addAll(graph.getVertices());
                componentsOutGroupThisPermutation.removeAll(componentsInGroupThisPermutation);

                HashMap<E, Number> distancesInPermut = new HashMap<>();
                HashMap<E, Number> distancesOutPermut = new HashMap<>();
                allComponentsAvailable.forEach((component2) -> {
                    Number minDist = getShortestDistance(distancesMap, component, component2, onlyFromSource);

                    if (componentsInGroupThisPermutation.contains(component2) && (!component.equals(component2))
                            && minDist != null)
                        distancesInPermut.put(component2, minDist);
                    else if (componentsOutGroupThisPermutation.contains(component2) && minDist != null)
                        distancesOutPermut.put(component2, minDist);
                });
                differencesPropPermutations.add(estimateDifferenceOfProportionAtDistances(
                        distancesInPermut.values(), distancesOutPermut.values()));
            });

            // calculate the significance
            // P: based on Pearson's correlation between differences of proportions and distances
            // domain: based on the quantile of difference at each distance
            //System.err.println(component);
            double p = calculatePValue(differencesProp.get(component), differencesPropPermutations);
            int radius = estimateDomainRadius(differencesProp.get(component), differencesPropPermutations, 0.9);
            significance.put(component, new TestResultForEachVertex(p, radius));

            if (cache) {

            }
        });
    }

    return significance;
}

From source file: com.epam.ta.reportportal.events.handler.TicketActivitySubscriber.java

private List<Activity> processTestItemIssues(String projectName, String principal,
        Map<IssueDefinition, TestItem> data) {
    String emptyString = "";
    List<Activity> activities = new ArrayList<>();
    final Project projectSettings = projectSettingsRepository.findOne(projectName);
    Set<Map.Entry<IssueDefinition, TestItem>> entries = data.entrySet();
    for (Map.Entry<IssueDefinition, TestItem> entry : entries) {
        IssueDefinition issueDefinition = entry.getKey();
        TestItem testItem = entry.getValue();
        TestItemIssue testItemIssue = testItem.getIssue();
        String oldIssueDescription = testItemIssue.getIssueDescription();
        StatisticSubType statisticSubType = projectSettings.getConfiguration()
                .getByLocator(issueDefinition.getIssue().getIssueType());
        String oldIssueType = projectSettings.getConfiguration().getByLocator(testItemIssue.getIssueType())
                .getLongName();
        String initialComment = issueDefinition.getIssue().getComment();
        String comment = (null != initialComment) ? initialComment.trim() : emptyString;
        if (null == oldIssueDescription) {
            oldIssueDescription = emptyString;
        }
        Activity activity = activityBuilder.get().addProjectRef(projectName)
                .addLoggedObjectRef(issueDefinition.getId()).addObjectType(TestItem.TEST_ITEM)
                .addActionType(UPDATE_ITEM).addUserRef(principal).build();
        HashMap<String, Activity.FieldValues> history = new HashMap<>();
        if (!oldIssueDescription.equals(comment)) {

            Activity.FieldValues fieldValues = Activity.FieldValues.newOne().withOldValue(oldIssueDescription)
                    .withNewValue(comment);
            history.put(COMMENT, fieldValues);
        }
        if (statisticSubType != null
                && ((null == oldIssueType) || !oldIssueType.equalsIgnoreCase(statisticSubType.getLongName()))) {
            Activity.FieldValues fieldValues = Activity.FieldValues.newOne().withOldValue(oldIssueType)
                    .withNewValue(statisticSubType.getLongName());
            history.put(ISSUE_TYPE, fieldValues);
        }
        if (!history.isEmpty()) {
            activity.setHistory(history);
            activities.add(activity);
        }
    }
    return activities;
}

From source file: org.opensilk.music.library.folders.provider.FoldersLibraryProvider.java

@NonNull
public static List<File> filterAudioFiles(Context context, List<File> files) {
    if (files.size() == 0) {
        return Collections.emptyList();
    }
    //Map for cursor
    final HashMap<String, File> pathMap = new HashMap<>();
    //The returned list
    final List<File> audioFiles = new ArrayList<>();

    //Build the selection
    final int size = files.size();
    final StringBuilder selection = new StringBuilder();
    selection.append(MediaStore.Files.FileColumns.DATA + " IN (");
    for (int i = 0; i < size; i++) {
        final File f = files.get(i);
        final String path = f.getAbsolutePath();
        pathMap.put(path, f); // Add file to map while we're iterating
        selection.append("'").append(StringUtils.replace(path, "'", "''")).append("'");//TODO it would probably be better to use selectionArgs
        if (i < size - 1) {
            selection.append(",");
        }
    }
    selection.append(")");
    Cursor c = null;
    try {
        c = context.getContentResolver().query(MediaStore.Files.getContentUri("external"),
                MEDIA_TYPE_PROJECTION, selection.toString(), null, null);
        if (c != null && c.moveToFirst()) {
            do {
                final int mediaType = c.getInt(0);
                final String path = c.getString(1);
                final File f = pathMap.remove(path);
                if (f != null && mediaType == MediaStore.Files.FileColumns.MEDIA_TYPE_AUDIO) {
                    audioFiles.add(f);
                } //else throw away
            } while (c.moveToNext());
        }
        //either the query failed or the cursor didn't contain all the files we asked for.
        if (!pathMap.isEmpty()) {
            Timber.w("%d files weren't found in mediastore. Best guessing mime type", pathMap.size());
            for (File f : pathMap.values()) {
                final String mime = guessMimeType(f);
                if (StringUtils.contains(mime, "audio") || "application/ogg".equals(mime)) {
                    audioFiles.add(f);
                }
            }
        }
    } catch (Exception e) {
        if (ModelUtil.DUMPSTACKS)
            Timber.e(e, "filterAudioFiles");
    } finally {
        closeQuietly(c);
    }
    return audioFiles;
}

From source file: org.apache.hadoop.hive.ql.parse.mr2.GenMR2Utils.java

/**
 * Set the key and value description for all the tasks rooted at the given
 * task. Loops over all the tasks recursively.
 *
 * @param task the root task of the tree to process
 */
public static void setKeyAndValueDescForTaskTree(Task<? extends Serializable> task) {

    if (task instanceof ConditionalTask) {
        List<Task<? extends Serializable>> listTasks = ((ConditionalTask) task).getListTasks();
        for (Task<? extends Serializable> tsk : listTasks) {
            setKeyAndValueDescForTaskTree(tsk);
        }
    } else if (task instanceof MR2ExecDriver) {
        MR2Work work = (MR2Work) task.getWork();
        work.getMapWork().deriveExplainAttributes();
        HashMap<String, Operator<? extends OperatorDesc>> opMap = work.getMapWork().getAliasToWork();
        if (opMap != null && !opMap.isEmpty()) {
            for (Operator<? extends OperatorDesc> op : opMap.values()) {
                setKeyAndValueDesc(work.getReduceWork(), op);
            }
        }
    } else if (task != null && (task.getWork() instanceof TezWork)) {
        TezWork work = (TezWork) task.getWork();
        for (BaseWork w : work.getAllWorkUnsorted()) {
            if (w instanceof MapWork) {
                ((MapWork) w).deriveExplainAttributes();
            }
        }
    } else if (task instanceof SparkTask) {
        SparkWork work = (SparkWork) task.getWork();
        for (BaseWork w : work.getAllWorkUnsorted()) {
            if (w instanceof MapWork) {
                ((MapWork) w).deriveExplainAttributes();
            }
        }
    }

    if (task.getChildTasks() == null) {
        return;
    }

    for (Task<? extends Serializable> childTask : task.getChildTasks()) {
        setKeyAndValueDescForTaskTree(childTask);
    }
}

From source file: org.j2free.invoker.InvokerFilter.java

/**
 * Locks to prevent request processing while mapping is added.
 *
 * Finds all classes annotated with ServletConfig and maps the class to
 * the url specified in the annotation.  Wildcard mapping are allowed in
 * the form of *.extension or /some/path/*
 *
 * @param context an active ServletContext
 */
public void load(final ServletContext context) {
    try {
        write.lock();

        LinkedList<URL> urlList = new LinkedList<URL>();
        urlList.addAll(Arrays.asList(ClasspathUrlFinder.findResourceBases(EMPTY)));
        urlList.addAll(Arrays.asList(WarUrlFinder.findWebInfLibClasspaths(context)));

        URL[] urls = new URL[urlList.size()];
        urls = urlList.toArray(urls);

        AnnotationDB annoDB = new AnnotationDB();
        annoDB.setScanClassAnnotations(true);
        annoDB.setScanFieldAnnotations(false);
        annoDB.setScanMethodAnnotations(false);
        annoDB.setScanParameterAnnotations(false);
        annoDB.scanArchives(urls);

        HashMap<String, Set<String>> annotationIndex = (HashMap<String, Set<String>>) annoDB
                .getAnnotationIndex();
        if (annotationIndex != null && !annotationIndex.isEmpty()) {
            //-----------------------------------------------------------
            // Look for any classes annotated with @ServletConfig
            Set<String> classNames = annotationIndex.get(ServletConfig.class.getName());

            if (classNames != null) {
                for (String c : classNames) {
                    try {
                        final Class<? extends HttpServlet> klass = (Class<? extends HttpServlet>) Class
                                .forName(c);

                        if (klass.isAnnotationPresent(ServletConfig.class)) {
                            final ServletConfig config = (ServletConfig) klass
                                    .getAnnotation(ServletConfig.class);

                            // If the config specifies String mappings...
                            if (config.mappings() != null) {
                                for (String url : config.mappings()) {
                                    // Leave the asterisk, we'll add it when matching...
                                    //if (url.matches("(^\\*[^*]*?)|([^*]*?/\\*$)"))
                                    //    url = url.replace("*", EMPTY);

                                    url = url.toLowerCase(); // all comparisons are lower-case

                                    if (urlMap.putIfAbsent(url, klass) == null) {
                                        if (log.isDebugEnabled())
                                            log.debug("Mapping servlet " + klass.getName() + " to path " + url);
                                    } else
                                        log.error("Unable to map servlet  " + klass.getName() + " to path "
                                                + url + ", path already mapped to "
                                                + urlMap.get(url).getName());
                                }
                            }

                            // If the config specifies a regex mapping...
                            if (!empty(config.regex())) {
                                regexMap.putIfAbsent(config.regex(), klass);
                                if (log.isDebugEnabled())
                                    log.debug("Mapping servlet " + klass.getName() + " to regex path "
                                            + config.regex());
                            }

                            // Create an instance of the servlet and init it
                            HttpServlet servlet = klass.newInstance();
                            servlet.init(new ServletConfigImpl(klass.getName(), context));

                            // Store a reference
                            servletMap.put(klass, new ServletMapping(servlet, config));
                        }

                    } catch (Exception e) {
                        log.error("Error registering servlet [name=" + c + "]", e);
                    }
                }
            }

            //-----------------------------------------------------------
            // Look for any classes annotated with @FilterConfig
            classNames = annotationIndex.get(FilterConfig.class.getName());
            if (classNames != null) {
                for (String c : classNames) {
                    try {
                        final Class<? extends Filter> klass = (Class<? extends Filter>) Class.forName(c);

                        if (klass.isAnnotationPresent(FilterConfig.class)) {
                            final FilterConfig config = (FilterConfig) klass.getAnnotation(FilterConfig.class);

                            // Create an instance of the servlet and init it
                            Filter filter = klass.newInstance();
                            filter.init(new FilterConfigImpl(klass.getName(), context));

                            if (log.isDebugEnabled())
                                log.debug("Mapping filter " + klass.getName() + " to path " + config.match());

                            // Store a reference
                            filters.add(new FilterMapping(filter, config));
                        }
                    } catch (Exception e) {
                        log.error("Error registering servlet [name=" + c + "]", e);
                    }
                }
            }
        }
    } catch (IOException e) {
        log.error("Error loading urlMappings", e);
    } finally {
        write.unlock(); // ALWAYS Release the configure lock
    }
}

From source file: com.google.gwt.emultest.java.util.HashMapTest.java

public void testIsEmpty() {
    HashMap<String, String> srcMap = new HashMap<String, String>();
    checkEmptyHashMapAssumptions(srcMap);

    HashMap<String, String> dstMap = new HashMap<String, String>();
    checkEmptyHashMapAssumptions(dstMap);

    dstMap.putAll(srcMap);
    assertTrue(dstMap.isEmpty());

    dstMap.put(KEY_KEY, VALUE_VAL);
    assertFalse(dstMap.isEmpty());

    dstMap.remove(KEY_KEY);
    assertTrue(dstMap.isEmpty());
    assertEquals(dstMap.size(), 0);
}

From source file: com.google.gwt.emultest.java.util.HashMapTest.java

public void testClear() {
    HashMap<String, String> hashMap = new HashMap<String, String>();
    checkEmptyHashMapAssumptions(hashMap);

    hashMap.put("Hello", "Bye");
    assertFalse(hashMap.isEmpty());
    assertTrue(hashMap.size() == SIZE_ONE);

    hashMap.clear();
    assertTrue(hashMap.isEmpty());
    assertTrue(hashMap.size() == 0);
}

From source file: org.lexevs.dao.database.ibatis.valuesets.IbatisPickListEntryNodeDao.java

@SuppressWarnings("unchecked")
@Override
public PickListEntryNode resolvePLEntryNodeByRevision(String pickListId, String plEntryId, String revisionId)
        throws LBRevisionException {

    String prefix = this.getPrefixResolver().resolveDefaultPrefix();

    String vsPLEntryUId = this.getPickListEntryNodeUId(pickListId, plEntryId);

    String tempRevId = revisionId;

    if (vsPLEntryUId == null) {
        throw new LBRevisionException("PLEntry " + plEntryId + " doesn't exist in lexEVS. "
                + "Please check the plEntryId. Its possible that the given pickListEntry "
                + "has been REMOVEd from the lexEVS system in the past.");
    }

    String plEntryRevisionId = this.getLatestRevision(vsPLEntryUId);

    // 1. If 'revisionId' is null or 'revisionId' is the latest revision of the picklistEntry
    // then use getPLEntryById to get the PLEntry object and return.

    if (StringUtils.isEmpty(revisionId) || StringUtils.isEmpty(plEntryRevisionId)
            || revisionId.equals(plEntryRevisionId)) {
        return getPLEntryByUId(vsPLEntryUId);
    }

    // 2. Get the earliest revisionId on which change was applied on given 
    // PLEntry with reference given revisionId.

    HashMap revisionIdMap = (HashMap) this.getSqlMapClientTemplate().queryForMap(
            GET_PREV_REV_ID_FROM_GIVEN_REV_ID_FOR_PLENTRY_SQL,
            new PrefixedParameterTuple(prefix, vsPLEntryUId, revisionId), "revId", "revAppliedDate");

    if (revisionIdMap.isEmpty()) {
        revisionId = null;
    } else {

        revisionId = (String) revisionIdMap.keySet().toArray()[0];

        if (revisionId.equals(plEntryRevisionId)) {
            return getPLEntryByUId(vsPLEntryUId);
        }
    }

    // 3. Get the pick list entry data from history.
    PickListEntryNode pickListEntryNode = null;
    InsertOrUpdatePickListEntryBean plEntryBean = null;

    plEntryBean = (InsertOrUpdatePickListEntryBean) this.getSqlMapClientTemplate().queryForObject(
            GET_PICKLIST_ENTRY_METADATA_FROM_HISTORY_BY_REVISION_SQL,
            new PrefixedParameterTuple(prefix, vsPLEntryUId, revisionId));

    if (plEntryBean != null) {

        pickListEntryNode = getPLEntryNode(plEntryBean);

        // Get pick list definition context
        if (pickListEntryNode != null) {
            PickListEntryNodeChoice entryNodeChoice = pickListEntryNode.getPickListEntryNodeChoice();

            if (entryNodeChoice.getInclusionEntry() != null) {
                List<String> contextList = this.getSqlMapClientTemplate().queryForList(
                        GET_CONTEXT_LIST_FROM_HISTORY_BY_PARENT_ENTRYSTATEGUID_AND_TYPE_SQL,
                        new PrefixedParameterTuple(prefix, plEntryBean.getEntryStateUId(),
                                ReferenceType.PICKLISTENTRY.name()));

                entryNodeChoice.getInclusionEntry().setPickContext(contextList);
            }
        }
    }

    // 4. If pick list entry is not in history, get it from base table.
    if (pickListEntryNode == null && revisionId != null) {

        InsertOrUpdatePickListEntryBean plEntryNodeBean = (InsertOrUpdatePickListEntryBean) this
                .getSqlMapClientTemplate().queryForObject(GET_PICKLIST_ENTRYNODE_METADATA_BY_PLENTRY_GUID_SQL,
                        new PrefixedParameter(prefix, vsPLEntryUId));

        if (plEntryNodeBean != null) {

            pickListEntryNode = getPLEntryNode(plEntryNodeBean);
        }
    }

    // 5. Get all pick list entry node property.
    if (pickListEntryNode != null) {
        List<String> propertyIdList = this.getSqlMapClientTemplate().queryForList(
                GET_ENTRYNODE_PROPERTY_IDS_LIST_BY_ENTRYNODE_UID_SQL,
                new PrefixedParameterTuple(prefix, vsPLEntryUId, ReferenceType.PICKLISTENTRY.name()));

        Properties properties = new Properties();

        for (String propId : propertyIdList) {
            Property pickListEntryProperty = null;

            try {
                pickListEntryProperty = vsPropertyDao.resolveVSPropertyByRevision(vsPLEntryUId, propId,
                        tempRevId);
            } catch (LBRevisionException e) {
                continue;
            }

            properties.addProperty(pickListEntryProperty);
        }

        pickListEntryNode.setProperties(properties);
    }

    return pickListEntryNode;
}