Example usage for java.util HashSet clear

List of usage examples for java.util HashSet clear

Introduction

On this page you can find example usage for java.util.HashSet.clear().

Prototype

public void clear() 

Source Link

Document

Removes all of the elements from this set.

Usage

From source file:org.tellervo.desktop.bulkdataentry.command.ImportSelectedSamplesCommand.java

/**
 * Imports (saves or updates) the currently selected sample rows via the
 * webservice: first validates that each row has the required fields, then
 * creates/updates each TridasSample and, when radius-with-sample mode is
 * enabled, its associated TridasRadius.
 *
 * @see com.dmurph.mvc.control.ICommand#execute(com.dmurph.mvc.MVCEvent)
 */
@Override
public void execute(MVCEvent argEvent) {
    BulkImportModel model = BulkImportModel.getInstance();
    SampleModel smodel = model.getSampleModel();
    SampleTableModel tmodel = smodel.getTableModel();

    ElementModel emodel = model.getElementModel();

    MVCArrayList<TridasElement> elementlist = emodel.getImportedList();
    ElementTableModel etablemodel = emodel.getTableModel();

    // rows the user has ticked for import
    ArrayList<IBulkImportSingleRowModel> selected = new ArrayList<IBulkImportSingleRowModel>();
    tmodel.getSelected(selected);

    // here is where we verify they contain required info
    HashSet<String> requiredMessages = new HashSet<String>();
    ArrayList<SingleSampleModel> incompleteModels = new ArrayList<SingleSampleModel>();

    // scratch set of the properties populated on the current row; reused (cleared) per row
    HashSet<String> definedProps = new HashSet<String>();
    for (IBulkImportSingleRowModel srm : selected) {
        SingleSampleModel som = (SingleSampleModel) srm;
        definedProps.clear();
        for (String s : SingleSampleModel.TABLE_PROPERTIES) {
            if (som.getProperty(s) != null) {
                definedProps.add(s);
            }
        }
        // when radii are entered together with samples, radius fields are validated too
        if (smodel.isRadiusWithSample()) {
            for (String s : SingleRadiusModel.PROPERTIES) {
                if (som.getRadiusModel().getProperty(s) != null) {
                    definedProps.add(s);
                }
            }
        }
        boolean incomplete = false;

        // parent object is required
        if (!definedProps.contains(SingleSampleModel.OBJECT)) {
            requiredMessages.add("Cannot import without a parent object.");
            incomplete = true;
        }

        // parent element is required and must already exist on the server
        if (!definedProps.contains(SingleSampleModel.ELEMENT)) {
            requiredMessages.add("Cannot import without a parent element.");
            incomplete = true;
        } else if (fixTempParentCodes(som, emodel)) {
            // There was a temp code but it is fixed now
        } else {
            requiredMessages.add("Cannot import as parent element has not been created yet");
            incomplete = true;
        }

        // type is required
        if (!definedProps.contains(SingleSampleModel.TYPE)) {
            requiredMessages.add("Sample must contain a type.");
            incomplete = true;
        }

        // title is required
        if (!definedProps.contains(SingleSampleModel.TITLE)) {
            requiredMessages.add("Sample must have a title.");
            incomplete = true;
        }

        if (smodel.isRadiusWithSample()) {
            if (!definedProps.contains(SingleRadiusModel.TITLE)) {
                requiredMessages.add("Radius title must be populated.");
                incomplete = true;
            }
        }

        if (incomplete) {
            incompleteModels.add(som);
        }
    }

    // abort the whole import if any row failed validation; show the collected messages
    if (!incompleteModels.isEmpty()) {
        StringBuilder message = new StringBuilder();
        message.append("Please correct the following errors:\n");
        message.append(StringUtils.join(requiredMessages.toArray(), "\n"));
        Alert.message(model.getMainView(), "Importing Errors", message.toString());
        return;
    }

    // now we actually create the models
    int i = 0;
    for (IBulkImportSingleRowModel srm : selected) {
        SingleSampleModel ssm = (SingleSampleModel) srm;
        TridasSample origSample = new TridasSample();

        // NOTE(review): this only logs — the row is still saved below; confirm intended
        if (!ssm.isDirty()) {
            System.out.println("Object isn't dirty, not saving/updating: "
                    + ssm.getProperty(SingleSampleModel.TITLE).toString());
        }

        ssm.populateToTridasSample(origSample);

        // resolve the parent element (may still be a placeholder wrapper)
        Object e = ssm.getProperty(SingleSampleModel.ELEMENT);
        TridasElement parentElement = null;
        if (e instanceof TridasElementOrPlaceholder) {
            parentElement = ((TridasElementOrPlaceholder) e).getTridasElement();
        } else if (e instanceof TridasElement) {
            parentElement = (TridasElement) e;
        }

        // sample first: an existing identifier means UPDATE, otherwise CREATE under the parent
        EntityResource<TridasSample> sampleResource;
        if (origSample.getIdentifier() != null) {
            sampleResource = new EntityResource<TridasSample>(origSample, TellervoRequestType.UPDATE,
                    TridasSample.class);
        } else {
            sampleResource = new EntityResource<TridasSample>(origSample, parentElement, TridasSample.class);
        }
        sampleResource.setProperty(TellervoResourceProperties.ENTITY_REQUEST_FORMAT,
                TellervoRequestFormat.SUMMARY);

        // set up a progress dialog for this row (i of selected.size())
        Window parentWindow = SwingUtilities.getWindowAncestor(model.getMainView());
        TellervoResourceAccessDialog dialog = new TellervoResourceAccessDialog(parentWindow, sampleResource, i,
                selected.size());

        sampleResource.query();
        dialog.setVisible(true);

        // on failure report the error and move on to the next row
        if (!dialog.isSuccessful()) {
            JOptionPane.showMessageDialog(BulkImportModel.getInstance().getMainView(),
                    I18n.getText("error.savingChanges") + "\r\n" + I18n.getText("error") + ": "
                            + dialog.getFailException().getLocalizedMessage(),
                    I18n.getText("error"), JOptionPane.ERROR_MESSAGE);
            continue;
        }
        // refresh the row model from the server's result and mark it clean/deselected
        ssm.populateFromTridasSample(sampleResource.getAssociatedResult());
        ssm.setDirty(false);
        tmodel.setSelected(ssm, false);

        // add to imported list or update existing
        if (origSample.getIdentifier() != null) {
            TridasSample found = null;
            for (TridasSample tox : model.getSampleModel().getImportedList()) {
                if (tox.getIdentifier().getValue().equals(origSample.getIdentifier().getValue())) {
                    found = tox;
                    break;
                }
            }
            if (found == null) {
                //Alert.error("Error updating model", "Couldn't find the object in the model to update, please report bug.");
            } else {
                sampleResource.getAssociatedResult().copyTo(found);
            }
        } else {
            model.getSampleModel().getImportedList().add(sampleResource.getAssociatedResult());
        }

        if (ssm.getRadiusModel() != null) {
            // now lets do the radius
            TridasRadius origRadius = new TridasRadius();
            ssm.getRadiusModel().populateToTridasRadius(origRadius);

            TridasSample parentSample = sampleResource.getAssociatedResult();

            // radius resource: UPDATE when it already has an identifier, otherwise CREATE
            EntityResource<TridasRadius> radiusResource;
            if (origRadius.getIdentifier() != null) {
                radiusResource = new EntityResource<TridasRadius>(origRadius, TellervoRequestType.UPDATE,
                        TridasRadius.class);
            } else {
                radiusResource = new EntityResource<TridasRadius>(origRadius, parentSample, TridasRadius.class);
            }

            // set up a dialog...
            parentWindow = SwingUtilities.getWindowAncestor(model.getMainView());
            dialog = TellervoResourceAccessDialog.forWindow(parentWindow, radiusResource);

            radiusResource.query();
            dialog.setVisible(true);

            if (!dialog.isSuccessful()) {
                JOptionPane.showMessageDialog(BulkImportModel.getInstance().getMainView(),
                        I18n.getText("error.savingChanges") + "\r\n" + I18n.getText("error") + ": "
                                + dialog.getFailException().getLocalizedMessage(),
                        I18n.getText("error"), JOptionPane.ERROR_MESSAGE);
                continue;
            }
            ssm.getRadiusModel().populateFromTridasRadius(radiusResource.getAssociatedResult());
            ssm.getRadiusModel().setDirty(false);
            tmodel.setSelected(ssm, false);
        }
        i++;
    }
}

From source file:org.commoncrawl.mapred.ec2.postprocess.crawldb.CrawlDBMergingReducer.java

/** 
 * given html content (json object), extract out of domain hrefs and cache them 
 * and ... update stats /*from  ww w . ja v a 2  s. c o m*/
 * @param crawlStats
 * @param incomingJSONObject
 * @param extHRefs
 * @param fpSource
 * @param reporter
 */
static void updateLinkStatsFromHTMLContent(JsonObject crawlStats, JsonObject incomingJSONObject,
        HashSet<String> extHRefs, URLFPV2 fpSource, Reporter reporter) {
    JsonArray links = incomingJSONObject.getAsJsonArray("links");

    if (links == null) {
        reporter.incrCounter(Counters.NULL_LINKS_ARRAY, 1);
    } else {

        // clear our snapshot of externally referenced urls 
        // we only want to capture this information from 
        // the links extracted via the latest content
        if (extHRefs != null)
            extHRefs.clear();

        int intraDomainLinkCount = 0;
        int intraRootLinkCount = 0;
        int interDomainLinkCount = 0;

        for (JsonElement link : links) {
            JsonObject linkObj = link.getAsJsonObject();
            if (linkObj != null && linkObj.has("href")) {
                String href = linkObj.get("href").getAsString();
                GoogleURL urlObject = new GoogleURL(href);
                if (urlObject.isValid()) {
                    URLFPV2 linkFP = URLUtils.getURLFPV2FromURLObject(urlObject);
                    if (linkFP != null) {
                        if (linkFP.getRootDomainHash() == fpSource.getRootDomainHash()) {
                            if (linkFP.getDomainHash() == fpSource.getDomainHash()) {
                                intraDomainLinkCount++;
                            } else {
                                intraRootLinkCount++;
                            }
                        } else {
                            interDomainLinkCount++;
                            // track domains we link to
                            if (extHRefs != null) {
                                if (extHRefs.size() <= MAX_EXTERNALLY_REFERENCED_URLS) {
                                    extHRefs.add(urlObject.getCanonicalURL());
                                }
                            }
                        }
                    }
                }
            }
        }
        // update counts in crawl stats data structure ... 
        crawlStats.addProperty(CRAWLDETAIL_INTRADOMAIN_LINKS, intraDomainLinkCount);
        crawlStats.addProperty(CRAWLDETAIL_INTRAROOT_LINKS, intraRootLinkCount);
        crawlStats.addProperty(CRAWLDETAIL_INTERDOMAIN_LINKS, interDomainLinkCount);

        if (interDomainLinkCount <= 100) {
            reporter.incrCounter(Counters.INTERDOMAIN_LINKS_LTEQ_100, 1);
        } else if (interDomainLinkCount <= 1000) {
            reporter.incrCounter(Counters.INTERDOMAIN_LINKS_LTEQ_1000, 1);
        } else {
            reporter.incrCounter(Counters.INTERDOMAIN_LINKS_GT_1000, 1);
        }
    }
}

From source file:org.apache.ddlutils.io.DatabaseDataIO.java

/**
 * Sorts the given tables according to their foreign key order, so that a
 * table appears after every table it references via a foreign key.
 * Tables caught in circular dependencies are appended, unsorted, at the end.
 *
 * @param tables The tables
 * @return The sorted tables
 */
private List sortTables(Table[] tables) {
    ArrayList result = new ArrayList();
    HashSet processed = new HashSet();
    // pending: table -> set of tables it still waits for (insertion-ordered)
    ListOrderedMap pending = new ListOrderedMap();

    for (int idx = 0; idx < tables.length; idx++) {
        Table table = tables[idx];

        if (table.getForeignKeyCount() == 0) {
            // no prerequisites: emit immediately and seed the first round
            result.add(table);
            processed.add(table);
        } else {
            // collect the tables this one depends on; self-references are ignored
            HashSet waitedFor = new HashSet();

            for (int fkIdx = 0; fkIdx < table.getForeignKeyCount(); fkIdx++) {
                Table waitedForTable = table.getForeignKey(fkIdx).getForeignTable();

                if (!table.equals(waitedForTable)) {
                    waitedFor.add(waitedForTable);
                }
            }
            pending.put(table, waitedFor);
        }
    }

    HashSet newProcessed = new HashSet();

    // Round-based release: each round removes the previous round's tables from
    // every pending dependency set and emits tables whose set became empty.
    // Stops when a round releases nothing (cycle) or nothing is pending.
    while (!processed.isEmpty() && !pending.isEmpty()) {
        newProcessed.clear();
        for (Iterator it = pending.entrySet().iterator(); it.hasNext();) {
            Map.Entry entry = (Map.Entry) it.next();
            Table table = (Table) entry.getKey();
            HashSet waitedFor = (HashSet) entry.getValue();

            waitedFor.removeAll(processed);
            if (waitedFor.isEmpty()) {
                it.remove();
                result.add(table);
                newProcessed.add(table);
            }
        }
        processed.clear();

        // swap the two sets so this round's releases gate the next round
        HashSet tmp = processed;

        processed = newProcessed;
        newProcessed = tmp;
    }
    // the remaining are within circular dependencies
    for (Iterator it = pending.keySet().iterator(); it.hasNext();) {
        result.add(it.next());
    }
    return result;
}

From source file:net.antidot.sql.model.db.StdTable.java

/**
 * Indexes the rows of this table by the values of the given foreign key's
 * referenced columns, so rows sharing the same key values can be fetched as
 * a group. The resulting index is stored in {@code indexedRowsByFk}.
 *
 * @param fk the foreign key whose referenced columns drive the grouping
 */
public void indexesRows(ForeignKey fk) {
    HashMap<HashSet<String>, HashSet<Row>> indexedRows = new HashMap<HashSet<String>, HashSet<Row>>();
    // scratch set holding the current row's key values; reused (cleared) per row
    HashSet<String> columnNames = new HashSet<String>();
    for (Row r : body.getRows()) {
        for (String columnName : fk.getReferenceKey().getColumnNames()) {
            // Save values of columns in the fk for current row.
            final byte[] bs = r.getValues().get(columnName);
            // BUGFIX: new String(byte[]) used the platform default charset,
            // making the index platform-dependent; decode explicitly as UTF-8
            // (assumes DB values are UTF-8 — TODO confirm against the driver).
            columnNames.add(new String(bs, java.nio.charset.StandardCharsets.UTF_8));
        }
        log.debug("[Table:indexesRows] Row r = " + r + " columnNames = " + columnNames);
        if (indexedRows.get(columnNames) == null) {
            // defensive copy as map key: the scratch set is mutated/cleared below
            indexedRows.put(new HashSet<String>(columnNames), new HashSet<Row>());
        }
        indexedRows.get(columnNames).add(r);
        columnNames.clear();
    }
    indexedRowsByFk.put(fk, indexedRows);
}

From source file:br.com.bioscada.apps.biotracks.fragments.ChooseActivityDialogFragment.java

/**
 * Processes a group of items with the same label.
 * /*ww w . ja v a  2 s  .com*/
 * @param resolveInfos list of resolve infos
 * @param displayInfos list of display infos
 * @param start start index
 * @param end end index
 */
private void processGroup(List<ResolveInfo> resolveInfos, List<DisplayInfo> displayInfos, int start, int end) {
    ResolveInfo startResolveInfo = resolveInfos.get(start);
    CharSequence primaryLabel = startResolveInfo.loadLabel(packageManager);
    Drawable icon = startResolveInfo.loadIcon(packageManager);

    int num = end - start + 1;
    if (num == 1) {
        // Only one, set the secondary label to null
        displayInfos.add(new DisplayInfo(startResolveInfo, primaryLabel, null, icon));
    } else {
        // Decide package name or application name for the secondary label
        boolean usePackageName = false;
        CharSequence appName = startResolveInfo.activityInfo.applicationInfo.loadLabel(packageManager);
        if (appName == null) {
            usePackageName = true;
        } else {
            // Use HashSet to track duplicates
            HashSet<CharSequence> duplicates = new HashSet<CharSequence>();
            duplicates.add(appName);
            for (int i = start + 1; i <= end; i++) {
                ResolveInfo resolveInfo = resolveInfos.get(i);
                CharSequence name = resolveInfo.activityInfo.applicationInfo.loadLabel(packageManager);
                if ((name == null) || (duplicates.contains(name))) {
                    usePackageName = true;
                    break;
                } else {
                    duplicates.add(name);
                }
            }
            // Clear HashSet for later use
            duplicates.clear();
        }
        for (int i = start; i <= end; i++) {
            ResolveInfo resolveInfo = resolveInfos.get(i);
            CharSequence secondaryLabel = usePackageName ? resolveInfo.activityInfo.packageName
                    : resolveInfo.activityInfo.applicationInfo.loadLabel(packageManager);
            displayInfos.add(new DisplayInfo(resolveInfo, primaryLabel, secondaryLabel, icon));
        }
    }
}

From source file:chanupdater.ChanUpdater.java

/**
 * Applies the downloaded channel-list summaries ({@code cLists}) to the
 * channel table: channels that are new on the server are bulk-inserted, and
 * channels that are no longer reported are marked unavailable (only when
 * {@code doDeletes} is set). Running totals accumulate in
 * {@code totalAdds}/{@code totalDels}.
 *
 * @throws SQLException          on database access errors
 * @throws IOException           if a summary cannot be read
 * @throws FileNotFoundException if a summary file is missing
 * @throws LdvTableException     on channel-table access errors
 */
private void doUpdates() throws SQLException, IOException, FileNotFoundException, LdvTableException {
    if (verbose > 1) {
        System.out.println("Starting update process.");
    }

    // channels present locally but absent from the server's list (deletion candidates);
    // reused (cleared) for each channel set
    HashSet<ChanInfo> del = new HashSet<>();
    totalAdds = 0;
    totalDels = 0;

    for (ChanListSummary cls : cLists) {

        cls.printSummary();
        String server = cls.getServer();
        String cTyp = cls.getcType();

        if (verbose > 2) {
            System.out.format("Check %1$s for type:%2$s ", server, cTyp);
        }

        TreeMap<String, HashSet<ChanInfo>> chanSets = cls.getChanSets();
        for (Entry<String, HashSet<ChanInfo>> ent : chanSets.entrySet()) {
            del.clear();
            HashSet<ChanInfo> newChans = ent.getValue();
            String ifo = ent.getKey();
            if (verbose > 1) {
                System.out.format("Server: %1$s, cType: %2$s, IFO: %3$s, count: %4$,d\n", cls.getServer(),
                        cls.getcType(), ifo, newChans.size());
            }
            String namePat = ifo + ":%";
            TreeSet<ChanInfo> oldSet = chnTbl.getAsSet(server, namePat, cTyp, newChans.size());

            // Diff old vs new: channels in both sets need no action (removed from
            // newChans); channels only in the old set that are still marked
            // available are queued for deletion.
            for (ChanInfo old : oldSet) {
                boolean gotit = newChans.contains(old);
                if (gotit) {
                    // it's in both
                    newChans.remove(old);
                } else {
                    if (old.isAvailable()) {
                        // only in old add it to be deleted set
                        del.add(old);
                    }
                }
            }
            totalAdds += newChans.size();
            totalDels += del.size();

            if ((newChans.size() > 0 || del.size() > 0)) {
                if (verbose > 1) {
                    System.out.format("    add: %1$d, del %2$d\n", newChans.size(), del.size());
                }
                for (ChanInfo ci : newChans) {
                    if (verbose > 2) {
                        System.out.print("Add: ");
                        ci.print();
                    }
                    chnTbl.insertNewBulk(ci);
                }
                if (newChans.size() > 0) {
                    chnTbl.insertNewBulk(null); // complete the bulk insert
                }
                if (doDeletes) {
                    for (ChanInfo ci : del) {
                        if (verbose > 2) {
                            System.out.print("Del: ");
                            ci.print();
                        }
                        chnTbl.setAvailable(ci.getId(), false);
                    }
                }
            } else if (verbose > 1) {
                System.out.println("    no updates.");
            }

        }
        if (verbose > 0 && totalAdds + totalDels > 0) {
            System.out.format("Total additions: %1$,d, total removals: %2$,d, " + "Server: %3$s, type: %4$s%n",
                    totalAdds, totalDels, cls.getServer(), cls.getcType());
        } else if (verbose > 1 && totalAdds + totalDels == 0) {
            // BUGFIX: println does not interpret format specifiers, so the old
            // trailing "%n" was printed literally; dropped it.
            System.out.println("No changes to channel table.");
        }
    }
}

From source file:org.osaf.cosmo.service.impl.StandardContentServiceTest.java

/**
 * Verifies that updateCollection() applies child changes correctly: items
 * flagged inactive are removed, existing items are kept, and new items are
 * added.
 */
public void testUpdateCollectionWithChildren() throws Exception {
    User user = testHelper.makeDummyUser();
    CollectionItem rootCollection = contentDao.createRootItem(user);

    CollectionItem dummyCollection = new MockCollectionItem();
    dummyCollection.setName("foo");
    dummyCollection.setOwner(user);

    ContentItem dummyContent1 = new MockNoteItem();
    dummyContent1.setName("bar1");
    dummyContent1.setOwner(user);

    ContentItem dummyContent2 = new MockNoteItem();
    dummyContent2.setName("bar2");
    dummyContent2.setOwner(user);

    HashSet<Item> children = new HashSet<Item>();
    children.add(dummyContent1);
    children.add(dummyContent2);

    // create the collection with two children
    dummyCollection = service.createCollection(rootCollection, dummyCollection, children);

    assertEquals(2, dummyCollection.getChildren().size());

    ContentItem bar1 = getContentItemFromSet(dummyCollection.getChildren(), "bar1");
    ContentItem bar2 = getContentItemFromSet(dummyCollection.getChildren(), "bar2");
    assertNotNull(bar1);
    assertNotNull(bar2);

    // flag bar1 so the next update removes it
    bar1.setIsActive(false);

    ContentItem bar3 = new MockNoteItem();
    bar3.setName("bar3");
    bar3.setOwner(user);

    // update with: bar1 (inactive -> removed), bar2 (kept), bar3 (added)
    children.clear();
    children.add(bar1);
    children.add(bar2);
    children.add(bar3);

    dummyCollection = service.updateCollection(dummyCollection, children);

    // still two children: bar1 gone, bar2 retained, bar3 added
    assertEquals(2, dummyCollection.getChildren().size());

    bar1 = getContentItemFromSet(dummyCollection.getChildren(), "bar1");
    bar2 = getContentItemFromSet(dummyCollection.getChildren(), "bar2");
    bar3 = getContentItemFromSet(dummyCollection.getChildren(), "bar3");

    assertNull(bar1);
    assertNotNull(bar2);
    assertNotNull(bar3);
}

From source file:co.rewen.statex.StateXModule.java

/**
 * Given an array of keys, this returns a map of (key, value) pairs for the keys found, and
 * (key, null) for the keys that haven't been found. The lookup runs on a
 * background task and the result is delivered through the callback.
 */
@ReactMethod
public void multiGet(final ReadableArray keys, final Callback callback) {
    if (keys == null) {
        callback.invoke(AsyncStorageErrorUtil.getInvalidKeyError(null), null);
        return;
    }

    new GuardedAsyncTask<Void, Void>(getReactApplicationContext()) {
        @Override
        protected void doInBackgroundGuarded(Void... params) {
            if (!ensureDatabase()) {
                callback.invoke(AsyncStorageErrorUtil.getDBError(null), null);
                return;
            }

            String[] columns = { KEY_COLUMN, VALUE_COLUMN };
            // keys of the current batch NOT returned by the query; reused (cleared) per batch
            HashSet<String> keysRemaining = SetBuilder.newHashSet();
            WritableArray data = Arguments.createArray();
            // query in batches of MAX_SQL_KEYS — presumably to stay under
            // SQLite's bound-variable limit (NOTE(review): confirm)
            for (int keyStart = 0; keyStart < keys.size(); keyStart += MAX_SQL_KEYS) {
                int keyCount = Math.min(keys.size() - keyStart, MAX_SQL_KEYS);
                Cursor cursor = mStateXDatabaseSupplier.get().query(TABLE_STATE, columns,
                        AsyncLocalStorageUtil.buildKeySelection(keyCount),
                        AsyncLocalStorageUtil.buildKeySelectionArgs(keys, keyStart, keyCount), null, null,
                        null);
                keysRemaining.clear();
                try {
                    if (cursor.getCount() != keys.size()) {
                        // some keys have not been found - insert them with null into the final array
                        for (int keyIndex = keyStart; keyIndex < keyStart + keyCount; keyIndex++) {
                            keysRemaining.add(keys.getString(keyIndex));
                        }
                    }

                    if (cursor.moveToFirst()) {
                        do {
                            WritableArray row = Arguments.createArray();
                            row.pushString(cursor.getString(0));
                            row.pushString(cursor.getString(1));
                            data.pushArray(row);
                            // key was found in the DB, so it is no longer "remaining"
                            keysRemaining.remove(cursor.getString(0));
                        } while (cursor.moveToNext());
                    }
                } catch (Exception e) {
                    FLog.w(ReactConstants.TAG, e.getMessage(), e);
                    callback.invoke(AsyncStorageErrorUtil.getError(null, e.getMessage()), null);
                    return;
                } finally {
                    // cursor is always closed, even on the early error return
                    cursor.close();
                }

                // emit (key, null) rows for keys missing from this batch
                for (String key : keysRemaining) {
                    WritableArray row = Arguments.createArray();
                    row.pushString(key);
                    row.pushNull();
                    data.pushArray(row);
                }
                keysRemaining.clear();
            }

            callback.invoke(null, data);
        }
    }.execute();
}

From source file:com.android.utils.AccessibilityNodeInfoUtils.java

/**
 * Decides whether the given node should receive accessibility focus.
 * Web-navigable views always take focus; invisible nodes and window-sized
 * non-clickable containers never do. Otherwise the decision depends on
 * whether the node is accessibility-focusable, has visible children, or has
 * something to speak; as a last resort a node with text and no focusable
 * ancestor takes focus.
 */
public static boolean shouldFocusNode(final AccessibilityNodeInfoCompat node,
        final Map<AccessibilityNodeInfoCompat, Boolean> speakingNodeCache, boolean checkChildren) {
    if (node == null) {
        return false;
    }

    // Inside views that support web navigation, we delegate focus to the view itself and
    // assume that it navigates to and focuses the correct elements.
    if (WebInterfaceUtils.supportsWebActions(node)) {
        return true;
    }

    if (!isVisible(node)) {
        LogUtils.log(AccessibilityNodeInfoUtils.class, Log.VERBOSE, "Don't focus, node is not visible");
        return false;
    }

    // Only allow node with same bounds as window if it is clickable or leaf.
    if (areBoundsIdenticalToWindow(node) && !isClickable(node) && node.getChildCount() > 0) {
        LogUtils.log(AccessibilityNodeInfoUtils.class, Log.VERBOSE,
                "Don't focus, node bounds are same as window root node bounds");
        return false;
    }

    // visitedNodes guards against cycles; it is recycled in the finally block
    HashSet<AccessibilityNodeInfoCompat> visitedNodes = new HashSet<>();
    try {
        boolean accessibilityFocusable = isAccessibilityFocusableInternal(node, speakingNodeCache,
                visitedNodes);

        if (!checkChildren) {
            // End of the line. Don't check children and don't allow any recursion.
            return accessibilityFocusable;
        }

        if (accessibilityFocusable) {
            // Recycle and clear before reusing the set for isSpeakingNode;
            // clearing also prevents the finally block from recycling twice.
            AccessibilityNodeInfoUtils.recycleNodes(visitedNodes);
            visitedNodes.clear();
            // TODO: This may still result in focusing non-speaking nodes, but it
            // won't prevent unlabeled buttons from receiving focus.
            if (!hasVisibleChildren(node)) {
                LogUtils.log(AccessibilityNodeInfoUtils.class, Log.VERBOSE,
                        "Focus, node is focusable and has no visible children");
                return true;
            } else if (isSpeakingNode(node, speakingNodeCache, visitedNodes)) {
                LogUtils.log(AccessibilityNodeInfoUtils.class, Log.VERBOSE,
                        "Focus, node is focusable and has something to speak");
                return true;
            } else {
                LogUtils.log(AccessibilityNodeInfoUtils.class, Log.VERBOSE,
                        "Don't focus, node is focusable but has nothing to speak");
                return false;
            }
        }
    } finally {
        AccessibilityNodeInfoUtils.recycleNodes(visitedNodes);
    }

    // If this node has no focusable ancestors, but it still has text,
    // then it should receive focus from navigation and be read aloud.
    NodeFilter filter = new NodeFilter() {
        @Override
        public boolean accept(AccessibilityNodeInfoCompat node) {
            return shouldFocusNode(node, speakingNodeCache, false);
        }
    };

    if (!hasMatchingAncestor(node, filter) && hasText(node)) {
        LogUtils.log(AccessibilityNodeInfoUtils.class, Log.VERBOSE,
                "Focus, node has text and no focusable ancestors");
        return true;
    }

    LogUtils.log(AccessibilityNodeInfoUtils.class, Log.VERBOSE, "Don't focus, failed all focusability tests");
    return false;
}

From source file:StorageEngineClient.CombineFileInputFormat.java

/**
 * Packs the given blocks into CombineFileSplits without splitting individual
 * blocks: optionally sorts blocks by descending length and/or shuffles them
 * deterministically, then greedily fills splits up to maxSize (and at most
 * maxFileNumPerSplit files each); blocks left over once a split falls below
 * minSizeNode are packed into size-unbounded splits at the end.
 *
 * @param job          job configuration (tuning flags read from it)
 * @param one          grouping key (node/rack name) and its candidate blocks
 * @param blockToNodes pool of blocks not yet assigned to any split; mutated here
 * @param maxSize      target upper bound on a split's byte size (0 = unbounded)
 * @param minSizeNode  minimum acceptable split size for this grouping
 * @param minSizeRack  NOTE(review): not referenced in this method — confirm intended
 * @param splits       output list receiving the created splits
 * @param type         grouping mode: "node", "rack", or "all"
 */
private void processsplitForUnsplit(JobConf job, Map.Entry<String, List<OneBlockInfo>> one,
        HashMap<OneBlockInfo, String[]> blockToNodes, long maxSize, long minSizeNode, long minSizeRack,
        List<CombineFileSplit> splits, String type) {
    ArrayList<OneBlockInfo> validBlocks = new ArrayList<OneBlockInfo>();
    ArrayList<String> nodes = new ArrayList<String>();
    long curSplitSize = 0;
    // in "node" mode the split is pinned to this node for locality
    if (type.equals("node"))
        nodes.add(one.getKey());

    // candidate blocks: the group's own list, or the whole unassigned pool for "all"
    List<OneBlockInfo> blocks = null;
    if (!type.equals("all")) {
        blocks = one.getValue();
    } else {
        blocks = new ArrayList<OneBlockInfo>();
        blocks.addAll(blockToNodes.keySet());
    }

    OneBlockInfo[] blocksInNodeArr = blocks.toArray(new OneBlockInfo[blocks.size()]);
    if (job.getBoolean("hive.merge.inputfiles.sort", true)) {
        // sort by descending length so large blocks seed splits first
        Arrays.sort(blocksInNodeArr, new Comparator<OneBlockInfo>() {
            @Override
            public int compare(OneBlockInfo o1, OneBlockInfo o2) {
                long comparereuslt = o2.length - o1.length;
                int result = 0;
                if (comparereuslt > 0)
                    result = 1;

                if (comparereuslt < 0)
                    result = -1;

                return result;
            }
        });
    }

    if (job.getBoolean("hive.merge.inputfiles.rerange", false)) {
        // deterministic shuffle (fixed seed) to spread blocks across splits
        Random r = new Random(123456);
        OneBlockInfo tmp = null;
        for (int i = 0; i < blocksInNodeArr.length; i++) {
            int idx = r.nextInt(blocksInNodeArr.length);
            tmp = blocksInNodeArr[i];
            blocksInNodeArr[i] = blocksInNodeArr[idx];
            blocksInNodeArr[idx] = tmp;
        }
    }

    int maxFileNumPerSplit = job.getInt("hive.merge.inputfiles.maxFileNumPerSplit", 1000);

    // main packing pass: each unassigned block seeds a split which is then
    // greedily topped up with later unassigned blocks
    for (int i = 0; i < blocksInNodeArr.length; i++) {
        if (blockToNodes.containsKey(blocksInNodeArr[i])) {
            if (!type.equals("node")) {
                nodes.clear();
            }

            curSplitSize = blocksInNodeArr[i].length;
            validBlocks.clear();
            validBlocks.add(blocksInNodeArr[i]);
            blockToNodes.remove(blocksInNodeArr[i]);
            if (maxSize != 0 && curSplitSize >= maxSize) {
                // a single block already meets the split-size target
                if (!type.equals("node")) {
                    for (int k = 0; k < blocksInNodeArr[i].hosts.length; k++) {
                        nodes.add(blocksInNodeArr[i].hosts[k]);
                    }
                }
                addCreatedSplit(job, splits, nodes, validBlocks);
            } else {
                // top up with later unassigned blocks until maxSize or the
                // per-split file cap is reached
                int filenum = 1;
                for (int j = i + 1; j < blocksInNodeArr.length; j++) {
                    if (blockToNodes.containsKey(blocksInNodeArr[j])) {
                        long size1 = blocksInNodeArr[j].length;
                        if (maxSize != 0 && curSplitSize < maxSize) {
                            curSplitSize += size1;
                            filenum++;
                            validBlocks.add(blocksInNodeArr[j]);
                            blockToNodes.remove(blocksInNodeArr[j]);
                        }
                        if (filenum >= maxFileNumPerSplit) {
                            break;
                        }

                        if (curSplitSize >= maxSize) {
                            break;
                        }
                    }
                }
                if (minSizeNode != 0 && curSplitSize >= minSizeNode) {
                    if (!type.equals("node")) {
                        generateNodesInfo(validBlocks, nodes);
                    }

                    addCreatedSplit(job, splits, nodes, validBlocks);
                } else {
                    // split too small: return its blocks to the pool and stop;
                    // the leftover loop below packs whatever remains
                    for (OneBlockInfo oneblock : validBlocks) {
                        blockToNodes.put(oneblock, oneblock.hosts);
                    }
                    break;
                }
            }
        }
    }

    // leftover pass: pack remaining unassigned blocks into splits of at most
    // maxFileNumPerSplit files each, ignoring the size thresholds
    HashSet<OneBlockInfo> hs = new HashSet<OneBlockInfo>();
    while (blockToNodes.size() > 0) {
        validBlocks = new ArrayList<OneBlockInfo>();
        nodes = new ArrayList<String>();
        int filenum = 0;
        hs.clear();
        for (OneBlockInfo blockInfo : blockToNodes.keySet()) {
            filenum++;
            validBlocks.add(blockInfo);

            hs.add(blockInfo);
            if (filenum >= maxFileNumPerSplit) {
                break;
            }
        }
        // remove the chosen blocks from the pool (outside the iteration above
        // to avoid concurrent modification)
        for (OneBlockInfo blockInfo : hs) {
            blockToNodes.remove(blockInfo);
        }

        generateNodesInfo(validBlocks, nodes);

        this.addCreatedSplit(job, splits, nodes, validBlocks);
    }
}