Example usage for java.util.Set.clear()

List of usage examples for java.util.Set.clear()

Introduction

On this page you can find example usage for java.util.Set.clear().

Prototype

void clear();

Document

Removes all of the elements from this set (optional operation).
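
A minimal, self-contained sketch of the behavior described above (class and variable names are illustrative only and are not taken from the examples below):

import java.util.HashSet;
import java.util.Set;

public class SetClearDemo {
    public static void main(String[] args) {
        Set<String> tags = new HashSet<>();
        tags.add("alpha");
        tags.add("beta");

        // clear() removes every element but keeps the same Set instance
        tags.clear();

        System.out.println(tags.isEmpty()); // prints: true
    }
}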

Usage

From source file:ddf.security.expansion.impl.AbstractExpansion.java

@Override
public Set<String> expand(String key, Set<String> values) {
    Set<String> result;
    // if there's nothing to expand, just return
    if ((values == null) || (values.isEmpty())) {
        return values;
    }

    // if no rules have been established yet, return the original
    if ((expansionTable == null) || (expansionTable.isEmpty())) {
        return values;
    }

    // if they didn't specify a key value, just return the original string
    if ((key == null) || (key.isEmpty())) {
        LOGGER.warn("Expand called with a null key value - no expansion attempted.");
        return values;
    }

    List<String[]> mappingRuleList = expansionTable.get(key);

    // if there are no matching keys in the expansion table, return the original set
    if (mappingRuleList == null) {
        return values;
    }

    /*
     * This expansion loop builds on itself, so the order of the rules is important - the
     * expanded set of strings is processed for expansion by subsequent rules.
     * 
     * Each list element in the expansion table is a two-element array with the regular
     * expression to search for and the replacement value. The replacement value can be empty, in
     * which case the matched value is deleted.
     */
    result = values;
    String original;
    String expandedValue;
    Set<String> temp;
    Set<String> expandedSet = new HashSet<String>();
    Set<String> currentSet = new HashSet<String>();
    currentSet.addAll(values);
    LOGGER.debug("Original key of {} with value[s]: {}", key, values);
    for (String[] rule : mappingRuleList) {
        expandedSet.clear();
        if ((rule != null) && (rule.length == 2)) {
            if ((rule[0] != null) && (!rule[0].isEmpty())) {
                LOGGER.trace("Processing expansion entry: {} => {}", rule[0], rule[1]);
                // now go through and expand each string in the passed in set
                for (String s : currentSet) {
                    original = s;
                    expandedValue = doExpansion(s, rule);
                    LOGGER.debug("Expanded value from '{}' to '{}'", original, expandedValue);
                    expandedSet.addAll(split(expandedValue, attributeSeparator));
                }
            }
        } else {
            LOGGER.warn("Expansion table contains invalid entries - skipping.");
        }
        temp = currentSet;
        currentSet = expandedSet;
        expandedSet = temp;
    }

    LOGGER.debug("Expanded result for key {} is {}", key, currentSet);
    // update the original set passed in for expansion
    values.clear();
    values.addAll(currentSet);
    return currentSet;
}
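
The comment block in this example notes that the expansion loop builds on itself: instead of allocating a new set for every rule, the scratch set is cleared, refilled, and then swapped with the current set. A compact sketch of that clear-and-swap idiom, with the rule application reduced to a hypothetical transform function (none of these names come from the DDF source):

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.UnaryOperator;

final class ExpansionSketch {
    // Applies each transform to every value, reusing two sets via clear() and a swap.
    static Set<String> expandAll(Set<String> values, List<UnaryOperator<String>> transforms) {
        Set<String> current = new HashSet<>(values);
        Set<String> scratch = new HashSet<>();
        for (UnaryOperator<String> transform : transforms) {
            scratch.clear();                // reuse the scratch set instead of allocating a new one
            for (String value : current) {
                scratch.add(transform.apply(value));
            }
            Set<String> tmp = current;      // swap roles so the next transform sees the expanded values
            current = scratch;
            scratch = tmp;
        }
        return current;
    }
}

For instance, expandAll(Set.of("admin"), List.of(s -> s.toUpperCase())) would yield a set containing "ADMIN".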

From source file:nl.mvdr.umvc3replayanalyser.ocr.TesseractOCREngine.java

/**
 * Matches the given string to a character's name.
 *
 * @param text
 *            text to be matched, should be a Marvel character name
 * @param possibleCharacters
 *            the characters that text may match, may not be empty
 * @return the character to whose name the given text is closest
 * @throws OCRException
 *             in case the matching character cannot be uniquely determined
 */
private Umvc3Character matchToCharacterName(String text, Set<Umvc3Character> possibleCharacters)
        throws OCRException {
    if (log.isDebugEnabled()) {
        if (possibleCharacters.size() == Umvc3Character.values().length) {
            log.debug(String.format("Attempting to match %s to the UMvC3 characters", text));
        } else {
            log.debug(String.format("Attempting to match %s to the following characters: %s", text,
                    possibleCharacters));
        }
    }

    // Compute the minimal Levenshtein distance between the given text and the uppercase character names.
    int minimalDistance = Integer.MAX_VALUE;
    Set<Umvc3Character> matchingCharacters = EnumSet.noneOf(Umvc3Character.class);

    for (Umvc3Character character : possibleCharacters) {
        int distance = StringUtils.getLevenshteinDistance(character.getName().toUpperCase(), text);
        if (distance < minimalDistance) {
            minimalDistance = distance;
            matchingCharacters.clear();
            matchingCharacters.add(character);
        } else if (distance == minimalDistance) {
            matchingCharacters.add(character);
        }
    }

    // matchingCharacters is not empty, since there must be at least one character with a distance less than
    // Integer.MAX_VALUE.
    Umvc3Character result;
    if (1 < matchingCharacters.size()) {
        // More than one match found.
        result = handleMultipleMatches(text, minimalDistance, matchingCharacters);
    } else {
        // Exactly one match, return it.
        result = matchingCharacters.iterator().next();
    }

    if (log.isDebugEnabled()) {
        log.debug(String.format("Match found: %s. levenshtein(%s, %s) = %s", result,
                result.getName().toUpperCase(), text, "" + minimalDistance));
    }
    return result;
}
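
The distance loop above is a general minimum-tracking idiom: whenever a strictly smaller distance appears, clear() empties the running set of best matches before the new candidate is added, while equal distances accumulate as ties. A generic sketch of that idiom (type parameters and names are hypothetical, not part of the replay-analyser code):

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.ToIntFunction;

final class ClosestMatches {
    // Returns all candidates that share the minimal score.
    static <T> Set<T> argmin(List<T> candidates, ToIntFunction<T> score) {
        int best = Integer.MAX_VALUE;
        Set<T> result = new HashSet<>();
        for (T candidate : candidates) {
            int s = score.applyAsInt(candidate);
            if (s < best) {
                best = s;
                result.clear();        // new strict minimum: drop the previous ties
                result.add(candidate);
            } else if (s == best) {
                result.add(candidate); // same minimum: keep as a tie
            }
        }
        return result;
    }
}

In the method above, the score function corresponds to the Levenshtein distance between the OCR text and each character's uppercase name.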

From source file:org.apache.sling.scripting.sightly.impl.engine.extension.URIManipulationFilterExtension.java

private void handleSelectors(RuntimeObjectModel runtimeObjectModel, Set<String> selectors,
        Map<String, Object> options) {
    if (options.containsKey(SELECTORS)) {
        Object selectorsOption = options.get(SELECTORS);
        if (selectorsOption == null) {
            // we want to remove all selectors
            selectors.clear();
        } else if (selectorsOption instanceof String) {
            String selectorString = (String) selectorsOption;
            String[] selectorsArray = selectorString.split("\\.");
            replaceSelectors(selectors, selectorsArray);
        } else if (selectorsOption instanceof Object[]) {
            Object[] selectorsURIArray = (Object[]) selectorsOption;
            String[] selectorsArray = new String[selectorsURIArray.length];
            int index = 0;
            for (Object selector : selectorsURIArray) {
                selectorsArray[index++] = runtimeObjectModel.toString(selector);
            }
            replaceSelectors(selectors, selectorsArray);
        }
    }
    Object addSelectorsOption = options.get(ADD_SELECTORS);
    if (addSelectorsOption instanceof String) {
        String selectorString = (String) addSelectorsOption;
        String[] selectorsArray = selectorString.split("\\.");
        addSelectors(selectors, selectorsArray);
    } else if (addSelectorsOption instanceof Object[]) {
        Object[] selectorsURIArray = (Object[]) addSelectorsOption;
        String[] selectorsArray = new String[selectorsURIArray.length];
        int index = 0;
        for (Object selector : selectorsURIArray) {
            selectorsArray[index++] = runtimeObjectModel.toString(selector);
        }
        addSelectors(selectors, selectorsArray);
    }
    Object removeSelectorsOption = options.get(REMOVE_SELECTORS);
    if (removeSelectorsOption instanceof String) {
        String selectorString = (String) removeSelectorsOption;
        String[] selectorsArray = selectorString.split("\\.");
        removeSelectors(selectors, selectorsArray);
    } else if (removeSelectorsOption instanceof Object[]) {
        Object[] selectorsURIArray = (Object[]) removeSelectorsOption;
        String[] selectorsArray = new String[selectorsURIArray.length];
        int index = 0;
        for (Object selector : selectorsURIArray) {
            selectorsArray[index++] = runtimeObjectModel.toString(selector);
        }
        removeSelectors(selectors, selectorsArray);
    }

}

From source file:edu.lafayette.metadb.web.metadata.UpdateMultipleMetadata.java

private Set<Integer> filterRecords(String source) {
    Set<Integer> records = new HashSet<Integer>();
    String[] processedEntries = source.split(",");
    try {
        for (String entry : processedEntries) {
            if (entry != null && !entry.isEmpty()) {
                if (entry.indexOf("-") != -1) {
                    int min = Integer.parseInt(entry.split("-")[0]);
                    int max = Integer.parseInt(entry.split("-")[1]);
                    for (int i = min; i <= max; i++)
                        records.add(i);
                } else
                    records.add(Integer.parseInt(entry));
            }
        }
    } catch (Exception e) {
        records.clear();
    }
    return records;

}

From source file:org.apache.syncope.core.persistence.jpa.content.XMLContentExporter.java

private List<String> sortByForeignKeys(final String dbSchema, final Connection conn,
        final Set<String> tableNames) throws SQLException {

    Set<MultiParentNode<String>> roots = new HashSet<>();

    final DatabaseMetaData meta = conn.getMetaData();

    final Map<String, MultiParentNode<String>> exploited = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
    final Set<String> pkTableNames = new HashSet<>();

    for (String tableName : tableNames) {
        MultiParentNode<String> node = exploited.get(tableName);
        if (node == null) {
            node = new MultiParentNode<>(tableName);
            roots.add(node);
            exploited.put(tableName, node);
        }

        pkTableNames.clear();

        ResultSet rs = null;
        try {
            rs = meta.getImportedKeys(conn.getCatalog(), dbSchema, tableName);

            // collect into a Set to avoid repeated PK table names
            while (rs.next()) {
                pkTableNames.add(rs.getString("PKTABLE_NAME"));
            }
        } finally {
            if (rs != null) {
                try {
                    rs.close();
                } catch (SQLException e) {
                    LOG.error("While closing tables result set", e);
                }
            }
        }

        for (String pkTableName : pkTableNames) {
            if (!tableName.equalsIgnoreCase(pkTableName)) {
                MultiParentNode<String> pkNode = exploited.get(pkTableName);
                if (pkNode == null) {
                    pkNode = new MultiParentNode<>(pkTableName);
                    roots.add(pkNode);
                    exploited.put(pkTableName, pkNode);
                }

                pkNode.addChild(node);

                if (roots.contains(node)) {
                    roots.remove(node);
                }
            }
        }
    }

    final List<String> sortedTableNames = new ArrayList<>(tableNames.size());
    MultiParentNodeOp.traverseTree(roots, sortedTableNames);

    // remove from sortedTableNames any table possibly added during lookup 
    // but matching some item in this.tablePrefixesToBeExcluded
    sortedTableNames.retainAll(tableNames);

    LOG.debug("Tables after retainAll {}", sortedTableNames);

    Collections.reverse(sortedTableNames);

    return sortedTableNames;
}

From source file:com.kalessil.phpStorm.yii2inspections.inspectors.MissingPropertyAnnotationsInspector.java

@Override
@NotNull
public PsiElementVisitor buildVisitor(@NotNull final ProblemsHolder holder, boolean isOnTheFly) {
    return new PhpElementVisitor() {
        @Override
        public void visitPhpClass(PhpClass clazz) {
            /* check only regular named classes */
            final PsiElement nameNode = NamedElementUtil.getNameIdentifier(clazz);
            if (null == nameNode) {
                return;
            }

            /* check if the class inherits from yii\base\Object */
            boolean supportsPropertyFeature = false;
            final Set<PhpClass> parents = InheritanceChainExtractUtil.collect(clazz);
            if (!parents.isEmpty()) {
                for (final PhpClass parent : parents) {
                    if (baseObjectClasses.contains(parent.getFQN())) {
                        supportsPropertyFeature = true;
                        break;
                    }
                }

                parents.clear();
            }
            if (!supportsPropertyFeature) {
                return;
            }

            /* iterate get methods, find matching set methods */
            final Map<String, String> props = this.findPropertyCandidates(clazz);
            if (props.size() > 0) {
                List<String> names = new ArrayList<>(props.keySet());
                Collections.sort(names);
                final String message = messagePattern.replace("%p%", String.join("', '", names));
                holder.registerProblem(nameNode, message, ProblemHighlightType.WEAK_WARNING,
                        new TheLocalFix(props));
            }
        }

        @NotNull
        private Map<String, String> findPropertyCandidates(@NotNull PhpClass clazz) {
            final Map<String, String> properties = new HashMap<>();

            /* extract methods and operate on name-methods relations */
            final Method[] methods = clazz.getOwnMethods();
            if (null == methods || 0 == methods.length) {
                return properties;
            }
            final Map<String, Method> mappedMethods = new HashMap<>();
            for (Method method : methods) {
                mappedMethods.put(method.getName(), method);
            }

            /* process extracted methods */
            for (String candidate : mappedMethods.keySet()) {
                Method getterMethod = null;
                Method setterMethod = null;

                /* pair up methods: for a getter, look up its matching setter; for a setter, skip it when a matching getter exists (it is handled from the getter side) */
                if (candidate.startsWith("get")) {
                    getterMethod = mappedMethods.get(candidate);
                    if (getterMethod.isStatic() || 0 != getterMethod.getParameters().length) {
                        getterMethod = null;
                    }

                    final String complimentarySetter = candidate.replaceAll("^get", "set");
                    if (mappedMethods.containsKey(complimentarySetter)) {
                        setterMethod = mappedMethods.get(complimentarySetter);
                        if (setterMethod.isStatic() || 0 == setterMethod.getParameters().length) {
                            setterMethod = null;
                        }

                    }
                }
                if (candidate.startsWith("set")) {
                    setterMethod = mappedMethods.get(candidate);
                    if (setterMethod.isStatic() || setterMethod.getParameters().length != 1) {
                        setterMethod = null;
                    }

                    final String complimentaryGetter = candidate.replaceAll("^set", "get");
                    if (mappedMethods.containsKey(complimentaryGetter)) {
                        continue;
                    }
                }

                /* skip candidates with neither a usable getter nor setter (or missing one, if both are required) */
                if ((null == getterMethod && null == setterMethod)
                        || (REQUIRE_BOTH_GETTER_SETTER && (null == getterMethod || null == setterMethod))) {
                    continue;
                }

                /* store the property and its types */
                final Set<String> propertyTypesFqns = new HashSet<>();

                if (null != getterMethod) {
                    propertyTypesFqns.addAll(getterMethod.getType().filterUnknown().getTypes());
                }
                if (null != setterMethod) {
                    final Parameter[] setterParams = setterMethod.getParameters();
                    if (setterParams.length > 0) {
                        propertyTypesFqns.addAll(setterParams[0].getType().filterUnknown().getTypes());
                    }
                }

                /* drop preceding \ in core types */
                final Set<String> propertyTypes = new HashSet<>();
                for (String type : propertyTypesFqns) {
                    if (type.length() > 0) {
                        if ('\\' == type.charAt(0) && 1 == StringUtils.countMatches(type, "\\")) {
                            type = type.replace("\\", "");
                        }
                        propertyTypes.add(type);
                    }
                }
                propertyTypesFqns.clear();

                final String typesAsString = propertyTypes.isEmpty() ? "mixed"
                        : String.join("|", propertyTypes);
                properties.put(StringUtils.uncapitalize(candidate.replaceAll("^(get|set)", "")), typesAsString);
            }

            /* exclude annotated properties: lazy bulk operation */
            if (properties.size() > 0) {
                final Collection<Field> fields = clazz.getFields();
                for (Field candidate : fields) {
                    /* do not process constants and static fields */
                    if (candidate.isConstant() || candidate.getModifier().isStatic()) {
                        continue;
                    }

                    properties.remove(candidate.getName());
                }
                fields.clear();
            }

            return properties;
        }
    };
}

From source file:org.apache.hadoop.mapred.split.TezMapredSplitsGrouper.java

public InputSplit[] getGroupedSplits(Configuration conf, InputSplit[] originalSplits, int desiredNumSplits,
        String wrappedInputFormatName) throws IOException {
    LOG.info("Grouping splits in Tez");

    int configNumSplits = conf.getInt(TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_COUNT, 0);
    if (configNumSplits > 0) {
        // always use config override if specified
        desiredNumSplits = configNumSplits;
        LOG.info("Desired numSplits overridden by config to: " + desiredNumSplits);
    }

    if (!(configNumSplits > 0 || originalSplits == null || originalSplits.length == 0)) {
        // numSplits has not been overridden by config
        // numSplits has been set at runtime
        // there are splits generated
        // Do sanity checks
        long totalLength = 0;
        for (InputSplit split : originalSplits) {
            totalLength += split.getLength();
        }

        int splitCount = desiredNumSplits > 0 ? desiredNumSplits : originalSplits.length;
        long lengthPerGroup = totalLength / splitCount;

        long maxLengthPerGroup = conf.getLong(TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_MAX_SIZE,
                TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_MAX_SIZE_DEFAULT);
        long minLengthPerGroup = conf.getLong(TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_MIN_SIZE,
                TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_MIN_SIZE_DEFAULT);
        if (maxLengthPerGroup < minLengthPerGroup || minLengthPerGroup <= 0) {
            throw new TezUncheckedException("Invalid max/min group lengths. Required min>0, max>=min. "
                    + " max: " + maxLengthPerGroup + " min: " + minLengthPerGroup);
        }
        if (lengthPerGroup > maxLengthPerGroup) {
            // splits too big to work. Need to override with max size.
            int newDesiredNumSplits = (int) (totalLength / maxLengthPerGroup) + 1;
            LOG.info("Desired splits: " + desiredNumSplits + " too small. " + " Desired splitLength: "
                    + lengthPerGroup + " Max splitLength: " + maxLengthPerGroup + " New desired splits: "
                    + newDesiredNumSplits + " Total length: " + totalLength + " Original splits: "
                    + originalSplits.length);

            desiredNumSplits = newDesiredNumSplits;
        } else if (lengthPerGroup < minLengthPerGroup) {
            // splits too small to work. Need to override with min size.
            int newDesiredNumSplits = (int) (totalLength / minLengthPerGroup) + 1;
            LOG.info("Desired splits: " + desiredNumSplits + " too large. " + " Desired splitLength: "
                    + lengthPerGroup + " Min splitLength: " + minLengthPerGroup + " New desired splits: "
                    + newDesiredNumSplits + " Total length: " + totalLength + " Original splits: "
                    + originalSplits.length);

            desiredNumSplits = newDesiredNumSplits;
        }
    }

    if (originalSplits == null) {
        LOG.info("Null original splits");
        return null;
    }

    if (desiredNumSplits == 0 || originalSplits.length == 0 || desiredNumSplits >= originalSplits.length) {
        // no grouping needed, so return all the splits as-is
        LOG.info("Using original number of splits: " + originalSplits.length + " desired splits: "
                + desiredNumSplits);
        InputSplit[] groupedSplits = new TezGroupedSplit[originalSplits.length];
        int i = 0;
        for (InputSplit split : originalSplits) {
            TezGroupedSplit newSplit = new TezGroupedSplit(1, wrappedInputFormatName, split.getLocations());
            newSplit.addSplit(split);
            groupedSplits[i++] = newSplit;
        }
        return groupedSplits;
    }

    String emptyLocation = "EmptyLocation";
    String[] emptyLocations = { emptyLocation };
    List<InputSplit> groupedSplitsList = new ArrayList<InputSplit>(desiredNumSplits);

    long totalLength = 0;
    Map<String, LocationHolder> distinctLocations = createLocationsMap(conf);
    // go through splits and add them to locations
    for (InputSplit split : originalSplits) {
        totalLength += split.getLength();
        String[] locations = split.getLocations();
        if (locations == null || locations.length == 0) {
            locations = emptyLocations;
        }
        for (String location : locations) {
            if (location == null) {
                location = emptyLocation;
            }
            distinctLocations.put(location, null);
        }
    }

    long lengthPerGroup = totalLength / desiredNumSplits;
    int numNodeLocations = distinctLocations.size();
    int numSplitsPerLocation = originalSplits.length / numNodeLocations;
    int numSplitsInGroup = originalSplits.length / desiredNumSplits;

    // allocation loop here so that we have a good initial size for the lists
    for (String location : distinctLocations.keySet()) {
        distinctLocations.put(location, new LocationHolder(numSplitsPerLocation + 1));
    }

    Set<String> locSet = new HashSet<String>();
    for (InputSplit split : originalSplits) {
        locSet.clear();
        SplitHolder splitHolder = new SplitHolder(split);
        String[] locations = split.getLocations();
        if (locations == null || locations.length == 0) {
            locations = emptyLocations;
        }
        for (String location : locations) {
            if (location == null) {
                location = emptyLocation;
            }
            locSet.add(location);
        }
        for (String location : locSet) {
            LocationHolder holder = distinctLocations.get(location);
            holder.splits.add(splitHolder);
        }
    }

    boolean groupByLength = conf.getBoolean(TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_BY_LENGTH,
            TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_BY_LENGTH_DEFAULT);
    boolean groupByCount = conf.getBoolean(TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_BY_COUNT,
            TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_BY_COUNT_DEFAULT);
    if (!(groupByLength || groupByCount)) {
        throw new TezUncheckedException("None of the grouping parameters are true: "
                + TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_BY_LENGTH + ", "
                + TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_BY_COUNT);
    }
    LOG.info("Desired numSplits: " + desiredNumSplits + " lengthPerGroup: " + lengthPerGroup + " numLocations: "
            + numNodeLocations + " numSplitsPerLocation: " + numSplitsPerLocation + " numSplitsInGroup: "
            + numSplitsInGroup + " totalLength: " + totalLength + " numOriginalSplits: " + originalSplits.length
            + " . Grouping by length: " + groupByLength + " count: " + groupByCount);

    // go through locations and group splits
    int splitsProcessed = 0;
    List<SplitHolder> group = new ArrayList<SplitHolder>(numSplitsInGroup + 1);
    Set<String> groupLocationSet = new HashSet<String>(10);
    boolean allowSmallGroups = false;
    boolean doingRackLocal = false;
    int iterations = 0;
    while (splitsProcessed < originalSplits.length) {
        iterations++;
        int numFullGroupsCreated = 0;
        for (Map.Entry<String, LocationHolder> entry : distinctLocations.entrySet()) {
            group.clear();
            groupLocationSet.clear();
            String location = entry.getKey();
            LocationHolder holder = entry.getValue();
            SplitHolder splitHolder = holder.getUnprocessedHeadSplit();
            if (splitHolder == null) {
                // all splits on node processed
                continue;
            }
            int oldHeadIndex = holder.headIndex;
            long groupLength = 0;
            int groupNumSplits = 0;
            do {
                group.add(splitHolder);
                groupLength += splitHolder.split.getLength();
                groupNumSplits++;
                holder.incrementHeadIndex();
                splitHolder = holder.getUnprocessedHeadSplit();
            } while (splitHolder != null
                    && (!groupByLength || (groupLength + splitHolder.split.getLength() <= lengthPerGroup))
                    && (!groupByCount || (groupNumSplits + 1 <= numSplitsInGroup)));

            if (holder.isEmpty() && !allowSmallGroups && (!groupByLength || groupLength < lengthPerGroup / 2)
                    && (!groupByCount || groupNumSplits < numSplitsInGroup / 2)) {
                // group too small, reset it
                holder.headIndex = oldHeadIndex;
                continue;
            }

            numFullGroupsCreated++;

            // One split group created
            String[] groupLocation = { location };
            if (location == emptyLocation) {
                groupLocation = null;
            } else if (doingRackLocal) {
                for (SplitHolder splitH : group) {
                    String[] locations = splitH.split.getLocations();
                    if (locations != null) {
                        for (String loc : locations) {
                            if (loc != null) {
                                groupLocationSet.add(loc);
                            }
                        }
                    }
                }
                groupLocation = groupLocationSet.toArray(groupLocation);
            }
            TezGroupedSplit groupedSplit = new TezGroupedSplit(group.size(), wrappedInputFormatName,
                    groupLocation,
                    // pass rack local hint directly to AM
                    ((doingRackLocal && location != emptyLocation) ? location : null));
            for (SplitHolder groupedSplitHolder : group) {
                groupedSplit.addSplit(groupedSplitHolder.split);
                Preconditions.checkState(groupedSplitHolder.isProcessed == false,
                        "Duplicates in grouping at location: " + location);
                groupedSplitHolder.isProcessed = true;
                splitsProcessed++;
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("Grouped " + group.size() + " length: " + groupedSplit.getLength() + " split at: "
                        + location);
            }
            groupedSplitsList.add(groupedSplit);
        }

        if (!doingRackLocal && numFullGroupsCreated < 1) {
            // no node could create a node-local group. go rack-local
            doingRackLocal = true;
            // re-create locations
            int numRemainingSplits = originalSplits.length - splitsProcessed;
            Set<InputSplit> remainingSplits = new HashSet<InputSplit>(numRemainingSplits);
            // gather remaining splits.
            for (Map.Entry<String, LocationHolder> entry : distinctLocations.entrySet()) {
                LocationHolder locHolder = entry.getValue();
                while (!locHolder.isEmpty()) {
                    SplitHolder splitHolder = locHolder.getUnprocessedHeadSplit();
                    if (splitHolder != null) {
                        remainingSplits.add(splitHolder.split);
                        locHolder.incrementHeadIndex();
                    }
                }
            }
            if (remainingSplits.size() != numRemainingSplits) {
                throw new TezUncheckedException(
                        "Expected: " + numRemainingSplits + " got: " + remainingSplits.size());
            }

            // doing all this now instead of up front because the number of remaining
            // splits is expected to be much smaller
            RackResolver.init(conf);
            Map<String, String> locToRackMap = new HashMap<String, String>(distinctLocations.size());
            Map<String, LocationHolder> rackLocations = createLocationsMap(conf);
            for (String location : distinctLocations.keySet()) {
                String rack = emptyLocation;
                if (location != emptyLocation) {
                    rack = RackResolver.resolve(location).getNetworkLocation();
                }
                locToRackMap.put(location, rack);
                if (rackLocations.get(rack) == null) {
                    // splits will probably be located in all racks
                    rackLocations.put(rack, new LocationHolder(numRemainingSplits));
                }
            }
            distinctLocations.clear();
            HashSet<String> rackSet = new HashSet<String>(rackLocations.size());
            int numRackSplitsToGroup = remainingSplits.size();
            for (InputSplit split : originalSplits) {
                if (numRackSplitsToGroup == 0) {
                    break;
                }
                // Iterate through the original splits in their order and consider them for grouping. 
                // This maintains the original ordering in the list and thus subsequent grouping will 
                // maintain that order
                if (!remainingSplits.contains(split)) {
                    continue;
                }
                numRackSplitsToGroup--;
                rackSet.clear();
                SplitHolder splitHolder = new SplitHolder(split);
                String[] locations = split.getLocations();
                if (locations == null || locations.length == 0) {
                    locations = emptyLocations;
                }
                for (String location : locations) {
                    if (location == null) {
                        location = emptyLocation;
                    }
                    rackSet.add(locToRackMap.get(location));
                }
                for (String rack : rackSet) {
                    rackLocations.get(rack).splits.add(splitHolder);
                }
            }
            remainingSplits.clear();
            distinctLocations = rackLocations;
            // adjust split length to be smaller because the data is non local
            float rackSplitReduction = conf.getFloat(
                    TezMapReduceSplitsGrouper.TEZ_GROUPING_RACK_SPLIT_SIZE_REDUCTION,
                    TezMapReduceSplitsGrouper.TEZ_GROUPING_RACK_SPLIT_SIZE_REDUCTION_DEFAULT);
            if (rackSplitReduction > 0) {
                long newLengthPerGroup = (long) (lengthPerGroup * rackSplitReduction);
                int newNumSplitsInGroup = (int) (numSplitsInGroup * rackSplitReduction);
                if (newLengthPerGroup > 0) {
                    lengthPerGroup = newLengthPerGroup;
                }
                if (newNumSplitsInGroup > 0) {
                    numSplitsInGroup = newNumSplitsInGroup;
                }
            }

            LOG.info("Doing rack local after iteration: " + iterations + " splitsProcessed: " + splitsProcessed
                    + " numFullGroupsInRound: " + numFullGroupsCreated + " totalGroups: "
                    + groupedSplitsList.size() + " lengthPerGroup: " + lengthPerGroup + " numSplitsInGroup: "
                    + numSplitsInGroup);

            // don't allow small groups for the first pass
            continue;
        }

        if (!allowSmallGroups && numFullGroupsCreated <= numNodeLocations / 10) {
            // a few nodes have a lot of data or data is thinly spread across nodes
            // so allow small groups now        
            allowSmallGroups = true;
            LOG.info("Allowing small groups after iteration: " + iterations + " splitsProcessed: "
                    + splitsProcessed + " numFullGroupsInRound: " + numFullGroupsCreated + " totalGroups: "
                    + groupedSplitsList.size());
        }

        if (LOG.isDebugEnabled()) {
            LOG.debug("Iteration: " + iterations + " splitsProcessed: " + splitsProcessed
                    + " numFullGroupsInRound: " + numFullGroupsCreated + " totalGroups: "
                    + groupedSplitsList.size());
        }
    }
    InputSplit[] groupedSplits = new InputSplit[groupedSplitsList.size()];
    groupedSplitsList.toArray(groupedSplits);
    LOG.info("Number of splits desired: " + desiredNumSplits + " created: " + groupedSplitsList.size()
            + " splitsProcessed: " + splitsProcessed);
    return groupedSplits;
}

From source file:com.netflix.genie.client.JobClientIntegrationTests.java

/**
 * Helper method to create a cluster/command combination for all tests.
 *
 * @throws Exception If it fails to create the cluster/command combination.
 */
private void createClusterAndCommandForTest() throws Exception {

    final Set<String> tags = Sets.newHashSet("laptop");

    final Cluster cluster = new Cluster.Builder(CLUSTER_NAME, "user", "1.0", ClusterStatus.UP).withTags(tags)
            .build();

    final String clusterId = clusterClient.createCluster(cluster);

    tags.clear();
    tags.add("bash");

    final Command command = new Command.Builder(COMMAND_NAME, "user", "version", CommandStatus.ACTIVE, "bash",
            1000).withTags(tags).build();

    final String commandId = commandClient.createCommand(command);

    clusterClient.addCommandsToCluster(clusterId, Lists.newArrayList(commandId));
}

From source file:org.apache.tinkerpop.gremlin.process.computer.traversal.strategy.decoration.VertexProgramStrategy.java

@Override
public void apply(final Traversal.Admin<?, ?> traversal) {
    // VertexPrograms can only execute at the root level of a Traversal and should not be applied locally prior to RemoteStrategy
    if (!(traversal.getParent() instanceof EmptyStep)
            || traversal.getStrategies().getStrategy(RemoteStrategy.class).isPresent())
        return;

    // back propagate as()-labels off of vertex computing steps
    Step<?, ?> currentStep = traversal.getEndStep();
    final Set<String> currentLabels = new HashSet<>();
    while (!(currentStep instanceof EmptyStep)) {
        if (currentStep instanceof VertexComputing && !(currentStep instanceof ProgramVertexProgramStep)) { // todo: is there a general solution?
            currentLabels.addAll(currentStep.getLabels());
            currentStep.getLabels().forEach(currentStep::removeLabel);
        } else {
            currentLabels.forEach(currentStep::addLabel);
            currentLabels.clear();
        }
        currentStep = currentStep.getPreviousStep();
    }

    // push GraphStep forward in the chain to reduce the number of TraversalVertexProgram compilations
    currentStep = traversal.getStartStep();
    while (!(currentStep instanceof EmptyStep)) {
        if (currentStep instanceof GraphStep && currentStep.getNextStep() instanceof VertexComputing) {
            int index = TraversalHelper.stepIndex(currentStep.getNextStep(), traversal);
            traversal.removeStep(currentStep);
            traversal.addStep(index, currentStep);
        } else
            currentStep = currentStep.getNextStep();
    }

    // wrap all non-VertexComputing steps into a TraversalVertexProgramStep
    currentStep = traversal.getStartStep();
    while (!(currentStep instanceof EmptyStep)) {
        Traversal.Admin<?, ?> computerTraversal = new DefaultTraversal<>();
        Step<?, ?> firstLegalOLAPStep = getFirstLegalOLAPStep(currentStep);
        Step<?, ?> lastLegalOLAPStep = getLastLegalOLAPStep(currentStep);
        if (!(firstLegalOLAPStep instanceof EmptyStep)) {
            int index = TraversalHelper.stepIndex(firstLegalOLAPStep, traversal);
            TraversalHelper.removeToTraversal(firstLegalOLAPStep, lastLegalOLAPStep.getNextStep(),
                    (Traversal.Admin) computerTraversal);
            final TraversalVertexProgramStep traversalVertexProgramStep = new TraversalVertexProgramStep(
                    traversal, computerTraversal);
            traversal.addStep(index, traversalVertexProgramStep);
        }
        currentStep = traversal.getStartStep();
        while (!(currentStep instanceof EmptyStep)) {
            if (!(currentStep instanceof VertexComputing))
                break;
            currentStep = currentStep.getNextStep();
        }
    }
    // if the last vertex computing step is a TraversalVertexProgramStep convert to OLTP with ComputerResultStep
    TraversalHelper.getLastStepOfAssignableClass(VertexComputing.class, traversal).ifPresent(step -> {
        if (step instanceof TraversalVertexProgramStep) {
            final ComputerResultStep computerResultStep = new ComputerResultStep<>(traversal);
            ((TraversalVertexProgramStep) step).getGlobalChildren().get(0).getEndStep().getLabels()
                    .forEach(computerResultStep::addLabel);
            // labeling should happen in TraversalVertexProgram (perhaps MapReduce)
            TraversalHelper.insertAfterStep(computerResultStep, (Step) step, traversal);
        }
    });
    // if there is a dangling vertex computing step, add an identity traversal (solve this in the future with a specialized MapReduce)
    if (traversal.getEndStep() instanceof VertexComputing
            && !(traversal.getEndStep() instanceof TraversalVertexProgramStep)) {
        final TraversalVertexProgramStep traversalVertexProgramStep = new TraversalVertexProgramStep(traversal,
                __.identity().asAdmin());
        traversal.addStep(traversalVertexProgramStep);
        traversal.addStep(new ComputerResultStep<>(traversal));
    }
    // all vertex computing steps need the graph computer function
    traversal.getSteps().stream().filter(step -> step instanceof VertexComputing)
            .forEach(step -> ((VertexComputing) step).setComputer(this.computer));
}

From source file:eu.stratosphere.nephele.services.memorymanager.spi.DefaultMemoryManager.java

@Override
public void releaseAll(AbstractInvokable owner) {
    // -------------------- BEGIN CRITICAL SECTION -------------------
    synchronized (this.lock) {
        if (this.isShutDown) {
            throw new IllegalStateException("Memory manager has been shut down.");
        }

        // get all segments
        final Set<DefaultMemorySegment> segments = this.allocatedSegments.remove(owner);

        // all segments may have been freed previously individually
        if (segments == null || segments.isEmpty()) {
            return;
        }

        // free each segment
        for (DefaultMemorySegment seg : segments) {
            final byte[] buffer = seg.destroy();
            this.freeSegments.add(buffer);
        }

        segments.clear();
    }
    // -------------------- END CRITICAL SECTION -------------------
}