Example usage for java.util.Set.clear()

List of usage examples for java.util.Set.clear()

Introduction

On this page you can find example usages of java.util.Set.clear().

Prototype

void clear();

Document

Removes all of the elements from this set (optional operation).
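
As a quick, self-contained sketch (not taken from any of the projects below), this shows the basic behavior of clear():

import java.util.HashSet;
import java.util.Set;

public class SetClearExample {
    public static void main(String[] args) {
        Set<String> names = new HashSet<>();
        names.add("alice");
        names.add("bob");
        System.out.println(names.size());    // 2
        names.clear();                        // removes every element
        System.out.println(names.isEmpty()); // true
    }
}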

Usage

From source file:nz.co.senanque.rules.RuleSessionImpl.java

protected void forwardChain() {
    s_log.debug("{}Forward Chain...", m_indenter);
    m_indenter.increment();
    final Set<RuleContext> rules = new HashSet<RuleContext>();
    m_firedRules.clear();
    while (m_agenda.size() > 0) {
        rules.clear();
        for (RuleProxyField proxy : m_agenda) {
            s_log.debug("{}agenda item: {}", m_indenter, proxy.toString());
            List<RuleContext> ruleContexts = proxy.getInputRules();
            rules.addAll(ruleContexts);
        }
        if (s_log.isDebugEnabled()) {
            s_log.debug("{}rules to fire:{}", m_indenter, rules.size());
            m_indenter.increment();
            for (RuleContext rule : rules) {
                s_log.debug("{}{}", m_indenter, rule);
            }
            m_indenter.decrement();
        }
        clearAgenda();
        for (RuleContext ruleContext : rules) {
            ruleContext.fire();
        }
    }
    m_indenter.decrement();
    s_log.debug("{}Completed Forward Chain", m_indenter);
}
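
The method above reuses one rules set across agenda passes, calling clear() at the top of each while iteration instead of allocating a new HashSet. A minimal sketch of that reuse idiom, with invented names (BatchProcessor and processAll are placeholders, not part of the source above):

import java.util.HashSet;
import java.util.List;
import java.util.Set;

class BatchProcessor {
    // Reused across iterations; clear() empties it without reallocating.
    private final Set<String> batch = new HashSet<>();

    void processAll(List<List<String>> chunks) {
        for (List<String> chunk : chunks) {
            batch.clear();          // start each pass with an empty set
            batch.addAll(chunk);    // deduplicate the current chunk
            // ... handle the deduplicated batch ...
        }
    }
}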

From source file:de.hasait.clap.CLAP.java

public CLAPResult parse(final String... pArgs) {
    final Set<CLAPParseContext> contextsWithInvalidToken = new HashSet<CLAPParseContext>();
    final List<CLAPParseContext> parsedContexts = new ArrayList<CLAPParseContext>();
    final LinkedList<CLAPParseContext> activeContexts = new LinkedList<CLAPParseContext>();
    activeContexts.add(new CLAPParseContext(this, pArgs));
    while (!activeContexts.isEmpty()) {
        final CLAPParseContext context = activeContexts.removeFirst();
        if (context.hasMoreTokens()) {
            final CLAPParseContext[] result = _root.parse(context);
            if (result != null) {
                for (final CLAPParseContext nextContext : result) {
                    activeContexts.add(nextContext);
                }
            } else {
                contextsWithInvalidToken.add(context);
            }
        } else {
            parsedContexts.add(context);
        }
    }
    if (parsedContexts.isEmpty()) {
        int maxArgIndex = Integer.MIN_VALUE;
        final Set<String> invalidTokensOfBestContexts = new HashSet<String>();
        for (final CLAPParseContext context : contextsWithInvalidToken) {
            final int currentArgIndex = context.getCurrentArgIndex();
            if (currentArgIndex > maxArgIndex) {
                invalidTokensOfBestContexts.clear();
            }
            if (currentArgIndex >= maxArgIndex) {
                maxArgIndex = currentArgIndex;
                invalidTokensOfBestContexts.add(context.currentArg());
            }
        }
        throw new CLAPException(
                nls(NLSKEY_CLAP_ERROR_INVALID_TOKEN_LIST, StringUtils.join(invalidTokensOfBestContexts, ", "))); //$NON-NLS-1$
    }

    final Map<CLAPParseContext, List<String>> contextErrorMessages = new HashMap<CLAPParseContext, List<String>>();
    final Set<CLAPResultImpl> results = new LinkedHashSet<CLAPResultImpl>();
    for (final CLAPParseContext context : parsedContexts) {
        final List<String> errorMessages = new ArrayList<String>();
        _root.validate(context, errorMessages);
        if (errorMessages.isEmpty()) {
            final CLAPResultImpl result = new CLAPResultImpl();
            _root.fillResult(context, result);
            results.add(result);
        } else {
            contextErrorMessages.put(context, errorMessages);
        }
    }

    if (results.isEmpty()) {
        int minErrorMessages = Integer.MAX_VALUE;
        final List<String> errorMessagesOfBestContexts = new ArrayList<String>();
        for (final Entry<CLAPParseContext, List<String>> entry : contextErrorMessages.entrySet()) {
            final int countErrorMessages = entry.getValue().size();
            if (countErrorMessages < minErrorMessages) {
                errorMessagesOfBestContexts.clear();
            }
            if (countErrorMessages <= minErrorMessages) {
                minErrorMessages = countErrorMessages;
                errorMessagesOfBestContexts
                        .add(StringUtils.join(entry.getValue(), nls(NLSKEY_CLAP_ERROR_ERROR_MESSAGE_SPLIT)));
            }
        }
        throw new CLAPException(nls(NLSKEY_CLAP_ERROR_VALIDATION_FAILED,
                StringUtils.join(errorMessagesOfBestContexts, nls(NLSKEY_CLAP_ERROR_ERROR_MESSAGES_SPLIT))));
    }

    if (results.size() > 1) {
        throw new CLAPException(nls(NLSKEY_CLAP_ERROR_AMBIGUOUS_RESULT));
    }

    return results.iterator().next();
}
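
Both error-handling branches in parse() use the same idiom: keep a collection of the current best candidates, call clear() whenever a strictly better candidate appears, and add when the candidate ties the best. A standalone sketch of that idiom (the score values are invented for illustration):

import java.util.ArrayList;
import java.util.List;

class BestCandidates {
    public static void main(String[] args) {
        int[] scores = { 3, 7, 7, 5, 7 };
        int best = Integer.MIN_VALUE;
        List<Integer> bestIndexes = new ArrayList<>();
        for (int i = 0; i < scores.length; i++) {
            if (scores[i] > best) {
                bestIndexes.clear();  // a strictly better score invalidates earlier candidates
            }
            if (scores[i] >= best) {
                best = scores[i];
                bestIndexes.add(i);
            }
        }
        System.out.println(bestIndexes); // [1, 2, 4]
    }
}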

From source file:edu.toronto.cs.ontools.taxonomy.AbstractTaxonomy.java

protected Set<String> findAncestors(String id) {
    Set<String> result = new HashSet<String>();
    if (this.getTerm(id) == null) {
        return result;
    }
    Set<String> front = new HashSet<String>();
    Set<String> newFront = new HashSet<String>();
    front.add(this.getRealId(id));
    result.add(this.getRealId(id));
    while (!front.isEmpty()) {
        for (String nextTermId : front) {

            for (String parentTermId : this.getTerm(nextTermId).getParents()) {
                if (!result.contains(parentTermId)) {
                    newFront.add(parentTermId);
                    result.add(parentTermId);
                }
            }
        }
        front.clear();
        front.addAll(newFront);
        newFront.clear();
    }
    return result;
}
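
findAncestors() walks the taxonomy breadth-first, and clear() is what lets the two frontier sets be reused between levels: the current frontier is emptied, refilled from the next frontier, and the next frontier is emptied for the following level. A generic sketch of that frontier-swap idiom over a plain adjacency map (the graph shape is assumed, not taken from the source above):

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

class ReachableNodes {
    static Set<String> reachable(Map<String, List<String>> edges, String start) {
        Set<String> seen = new HashSet<>();
        Set<String> front = new HashSet<>();
        Set<String> newFront = new HashSet<>();
        seen.add(start);
        front.add(start);
        while (!front.isEmpty()) {
            for (String node : front) {
                for (String next : edges.getOrDefault(node, List.of())) {
                    if (seen.add(next)) {
                        newFront.add(next);
                    }
                }
            }
            front.clear();           // done with this level
            front.addAll(newFront);  // the next level becomes the current frontier
            newFront.clear();        // ready to collect the level after that
        }
        return seen;
    }
}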

From source file:org.apache.hadoop.mapreduce.split.TezMapReduceSplitsGrouper.java

public List<InputSplit> getGroupedSplits(Configuration conf, List<InputSplit> originalSplits,
        int desiredNumSplits, String wrappedInputFormatName) throws IOException, InterruptedException {
    LOG.info("Grouping splits in Tez");

    int configNumSplits = conf.getInt(TEZ_GROUPING_SPLIT_COUNT, 0);
    if (configNumSplits > 0) {
        // always use config override if specified
        desiredNumSplits = configNumSplits;
        LOG.info("Desired numSplits overridden by config to: " + desiredNumSplits);
    }

    if (!(configNumSplits > 0 || originalSplits == null || originalSplits.size() == 0)) {
        // numSplits has not been overridden by config
        // numSplits has been set at runtime
        // there are splits generated
        // desired splits is less than number of splits generated
        // Do sanity checks
        long totalLength = 0;
        for (InputSplit split : originalSplits) {
            totalLength += split.getLength();
        }

        int splitCount = desiredNumSplits > 0 ? desiredNumSplits : originalSplits.size();
        long lengthPerGroup = totalLength / splitCount;

        long maxLengthPerGroup = conf.getLong(TEZ_GROUPING_SPLIT_MAX_SIZE, TEZ_GROUPING_SPLIT_MAX_SIZE_DEFAULT);
        long minLengthPerGroup = conf.getLong(TEZ_GROUPING_SPLIT_MIN_SIZE, TEZ_GROUPING_SPLIT_MIN_SIZE_DEFAULT);
        if (maxLengthPerGroup < minLengthPerGroup || minLengthPerGroup <= 0) {
            throw new TezUncheckedException("Invalid max/min group lengths. Required min>0, max>=min. "
                    + " max: " + maxLengthPerGroup + " min: " + minLengthPerGroup);
        }
        if (lengthPerGroup > maxLengthPerGroup) {
            // splits too big to work. Need to override with max size.
            int newDesiredNumSplits = (int) (totalLength / maxLengthPerGroup) + 1;
            LOG.info("Desired splits: " + desiredNumSplits + " too small. " + " Desired splitLength: "
                    + lengthPerGroup + " Max splitLength: " + maxLengthPerGroup + " New desired splits: "
                    + newDesiredNumSplits + " Total length: " + totalLength + " Original splits: "
                    + originalSplits.size());

            desiredNumSplits = newDesiredNumSplits;
        } else if (lengthPerGroup < minLengthPerGroup) {
            // splits too small to work. Need to override with min size.
            int newDesiredNumSplits = (int) (totalLength / minLengthPerGroup) + 1;
            LOG.info("Desired splits: " + desiredNumSplits + " too large. " + " Desired splitLength: "
                    + lengthPerGroup + " Min splitLength: " + minLengthPerGroup + " New desired splits: "
                    + newDesiredNumSplits + " Total length: " + totalLength + " Original splits: "
                    + originalSplits.size());

            desiredNumSplits = newDesiredNumSplits;
        }
    }

    List<InputSplit> groupedSplits = null;

    if (desiredNumSplits == 0 || originalSplits.size() == 0 || desiredNumSplits >= originalSplits.size()) {
        // nothing set. so return all the splits as is
        LOG.info("Using original number of splits: " + originalSplits.size() + " desired splits: "
                + desiredNumSplits);
        groupedSplits = new ArrayList<InputSplit>(originalSplits.size());
        for (InputSplit split : originalSplits) {
            TezGroupedSplit newSplit = new TezGroupedSplit(1, wrappedInputFormatName, split.getLocations());
            newSplit.addSplit(split);
            groupedSplits.add(newSplit);
        }
        return groupedSplits;
    }

    String emptyLocation = "EmptyLocation";
    String[] emptyLocations = { emptyLocation };
    groupedSplits = new ArrayList<InputSplit>(desiredNumSplits);

    long totalLength = 0;
    Map<String, LocationHolder> distinctLocations = createLocationsMap(conf);
    // go through splits and add them to locations
    for (InputSplit split : originalSplits) {
        totalLength += split.getLength();
        String[] locations = split.getLocations();
        if (locations == null || locations.length == 0) {
            locations = emptyLocations;
        }
        for (String location : locations) {
            if (location == null) {
                location = emptyLocation;
            }
            distinctLocations.put(location, null);
        }
    }

    long lengthPerGroup = totalLength / desiredNumSplits;
    int numNodeLocations = distinctLocations.size();
    int numSplitsPerLocation = originalSplits.size() / numNodeLocations;
    int numSplitsInGroup = originalSplits.size() / desiredNumSplits;

    // allocation loop here so that we have a good initial size for the lists
    for (String location : distinctLocations.keySet()) {
        distinctLocations.put(location, new LocationHolder(numSplitsPerLocation + 1));
    }

    Set<String> locSet = new HashSet<String>();
    for (InputSplit split : originalSplits) {
        locSet.clear();
        SplitHolder splitHolder = new SplitHolder(split);
        String[] locations = split.getLocations();
        if (locations == null || locations.length == 0) {
            locations = emptyLocations;
        }
        for (String location : locations) {
            if (location == null) {
                location = emptyLocation;
            }
            locSet.add(location);
        }
        for (String location : locSet) {
            LocationHolder holder = distinctLocations.get(location);
            holder.splits.add(splitHolder);
        }
    }

    boolean groupByLength = conf.getBoolean(TEZ_GROUPING_SPLIT_BY_LENGTH, TEZ_GROUPING_SPLIT_BY_LENGTH_DEFAULT);
    boolean groupByCount = conf.getBoolean(TEZ_GROUPING_SPLIT_BY_COUNT, TEZ_GROUPING_SPLIT_BY_COUNT_DEFAULT);
    if (!(groupByLength || groupByCount)) {
        throw new TezUncheckedException("None of the grouping parameters are true: "
                + TEZ_GROUPING_SPLIT_BY_LENGTH + ", " + TEZ_GROUPING_SPLIT_BY_COUNT);
    }
    LOG.info("Desired numSplits: " + desiredNumSplits + " lengthPerGroup: " + lengthPerGroup + " numLocations: "
            + numNodeLocations + " numSplitsPerLocation: " + numSplitsPerLocation + " numSplitsInGroup: "
            + numSplitsInGroup + " totalLength: " + totalLength + " numOriginalSplits: " + originalSplits.size()
            + " . Grouping by length: " + groupByLength + " count: " + groupByCount);

    // go through locations and group splits
    int splitsProcessed = 0;
    List<SplitHolder> group = new ArrayList<SplitHolder>(numSplitsInGroup);
    Set<String> groupLocationSet = new HashSet<String>(10);
    boolean allowSmallGroups = false;
    boolean doingRackLocal = false;
    int iterations = 0;
    while (splitsProcessed < originalSplits.size()) {
        iterations++;
        int numFullGroupsCreated = 0;
        for (Map.Entry<String, LocationHolder> entry : distinctLocations.entrySet()) {
            group.clear();
            groupLocationSet.clear();
            String location = entry.getKey();
            LocationHolder holder = entry.getValue();
            SplitHolder splitHolder = holder.getUnprocessedHeadSplit();
            if (splitHolder == null) {
                // all splits on node processed
                continue;
            }
            int oldHeadIndex = holder.headIndex;
            long groupLength = 0;
            int groupNumSplits = 0;
            do {
                group.add(splitHolder);
                groupLength += splitHolder.split.getLength();
                groupNumSplits++;
                holder.incrementHeadIndex();
                splitHolder = holder.getUnprocessedHeadSplit();
            } while (splitHolder != null
                    && (!groupByLength || (groupLength + splitHolder.split.getLength() <= lengthPerGroup))
                    && (!groupByCount || (groupNumSplits + 1 <= numSplitsInGroup)));

            if (holder.isEmpty() && !allowSmallGroups && (!groupByLength || groupLength < lengthPerGroup / 2)
                    && (!groupByCount || groupNumSplits < numSplitsInGroup / 2)) {
                // group too small, reset it
                holder.headIndex = oldHeadIndex;
                continue;
            }

            numFullGroupsCreated++;

            // One split group created
            String[] groupLocation = { location };
            if (location == emptyLocation) {
                groupLocation = null;
            } else if (doingRackLocal) {
                for (SplitHolder splitH : group) {
                    String[] locations = splitH.split.getLocations();
                    if (locations != null) {
                        for (String loc : locations) {
                            if (loc != null) {
                                groupLocationSet.add(loc);
                            }
                        }
                    }
                }
                groupLocation = groupLocationSet.toArray(groupLocation);
            }
            TezGroupedSplit groupedSplit = new TezGroupedSplit(group.size(), wrappedInputFormatName,
                    groupLocation,
                    // pass rack local hint directly to AM
                    ((doingRackLocal && location != emptyLocation) ? location : null));
            for (SplitHolder groupedSplitHolder : group) {
                groupedSplit.addSplit(groupedSplitHolder.split);
                Preconditions.checkState(groupedSplitHolder.isProcessed == false,
                        "Duplicates in grouping at location: " + location);
                groupedSplitHolder.isProcessed = true;
                splitsProcessed++;
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("Grouped " + group.size() + " length: " + groupedSplit.getLength() + " split at: "
                        + location);
            }
            groupedSplits.add(groupedSplit);
        }

        if (!doingRackLocal && numFullGroupsCreated < 1) {
            // no node could create a node-local group. go rack-local
            doingRackLocal = true;
            // re-create locations
            int numRemainingSplits = originalSplits.size() - splitsProcessed;
            Set<InputSplit> remainingSplits = new HashSet<InputSplit>(numRemainingSplits);
            // gather remaining splits.
            for (Map.Entry<String, LocationHolder> entry : distinctLocations.entrySet()) {
                LocationHolder locHolder = entry.getValue();
                while (!locHolder.isEmpty()) {
                    SplitHolder splitHolder = locHolder.getUnprocessedHeadSplit();
                    if (splitHolder != null) {
                        remainingSplits.add(splitHolder.split);
                        locHolder.incrementHeadIndex();
                    }
                }
            }
            if (remainingSplits.size() != numRemainingSplits) {
                throw new TezUncheckedException(
                        "Expected: " + numRemainingSplits + " got: " + remainingSplits.size());
            }

            // doing all this now instead of up front because the number of remaining
            // splits is expected to be much smaller
            RackResolver.init(conf);
            Map<String, String> locToRackMap = new HashMap<String, String>(distinctLocations.size());
            Map<String, LocationHolder> rackLocations = createLocationsMap(conf);
            for (String location : distinctLocations.keySet()) {
                String rack = emptyLocation;
                if (location != emptyLocation) {
                    rack = RackResolver.resolve(location).getNetworkLocation();
                }
                locToRackMap.put(location, rack);
                if (rackLocations.get(rack) == null) {
                    // splits will probably be located in all racks
                    rackLocations.put(rack, new LocationHolder(numRemainingSplits));
                }
            }
            distinctLocations.clear();
            HashSet<String> rackSet = new HashSet<String>(rackLocations.size());
            int numRackSplitsToGroup = remainingSplits.size();
            for (InputSplit split : originalSplits) {
                if (numRackSplitsToGroup == 0) {
                    break;
                }
                // Iterate through the original splits in their order and consider them for grouping. 
                // This maintains the original ordering in the list and thus subsequent grouping will 
                // maintain that order
                if (!remainingSplits.contains(split)) {
                    continue;
                }
                numRackSplitsToGroup--;
                rackSet.clear();
                SplitHolder splitHolder = new SplitHolder(split);
                String[] locations = split.getLocations();
                if (locations == null || locations.length == 0) {
                    locations = emptyLocations;
                }
                for (String location : locations) {
                    if (location == null) {
                        location = emptyLocation;
                    }
                    rackSet.add(locToRackMap.get(location));
                }
                for (String rack : rackSet) {
                    rackLocations.get(rack).splits.add(splitHolder);
                }
            }

            remainingSplits.clear();
            distinctLocations = rackLocations;
            // adjust split length to be smaller because the data is non local
            float rackSplitReduction = conf.getFloat(TEZ_GROUPING_RACK_SPLIT_SIZE_REDUCTION,
                    TEZ_GROUPING_RACK_SPLIT_SIZE_REDUCTION_DEFAULT);
            if (rackSplitReduction > 0) {
                long newLengthPerGroup = (long) (lengthPerGroup * rackSplitReduction);
                int newNumSplitsInGroup = (int) (numSplitsInGroup * rackSplitReduction);
                if (newLengthPerGroup > 0) {
                    lengthPerGroup = newLengthPerGroup;
                }
                if (newNumSplitsInGroup > 0) {
                    numSplitsInGroup = newNumSplitsInGroup;
                }
            }

            LOG.info("Doing rack local after iteration: " + iterations + " splitsProcessed: " + splitsProcessed
                    + " numFullGroupsInRound: " + numFullGroupsCreated + " totalGroups: " + groupedSplits.size()
                    + " lengthPerGroup: " + lengthPerGroup + " numSplitsInGroup: " + numSplitsInGroup);

            // don't do small groups for the first pass
            continue;
        }

        if (!allowSmallGroups && numFullGroupsCreated <= numNodeLocations / 10) {
            // a few nodes have a lot of data or data is thinly spread across nodes
            // so allow small groups now        
            allowSmallGroups = true;
            LOG.info("Allowing small groups after iteration: " + iterations + " splitsProcessed: "
                    + splitsProcessed + " numFullGroupsInRound: " + numFullGroupsCreated + " totalGroups: "
                    + groupedSplits.size());
        }

        if (LOG.isDebugEnabled()) {
            LOG.debug("Iteration: " + iterations + " splitsProcessed: " + splitsProcessed
                    + " numFullGroupsInRound: " + numFullGroupsCreated + " totalGroups: "
                    + groupedSplits.size());
        }
    }
    LOG.info("Number of splits desired: " + desiredNumSplits + " created: " + groupedSplits.size()
            + " splitsProcessed: " + splitsProcessed);
    return groupedSplits;
}

From source file:org.hawkular.alerter.prometheus.PrometheusAlerter.java

private synchronized void update() {
    log.debug("Refreshing External Prometheus Triggers!");
    try {
        if (expressionExecutor == null) {
            expressionExecutor = new ScheduledThreadPoolExecutor(
                    Integer.valueOf(defaultProperties.get(THREAD_POOL_SIZE)));
        }

        Set<ExternalCondition> activeConditions = new HashSet<>();
        log.debugf("Found [%d] active External Prometheus Triggers!", activeTriggers.size());

        // for each trigger look for Prometheus Conditions and start running them
        Collection<Condition> conditions = null;
        for (Trigger trigger : activeTriggers.values()) {
            try {
                conditions = definitions.getTriggerConditions(trigger.getTenantId(), trigger.getId(), null);
                log.debugf("Checking [%s] Conditions for external Prometheus trigger [%s]", conditions.size(),
                        trigger.getName());
            } catch (Exception e) {
                log.error("Failed to fetch Conditions when scheduling prometheus conditions for " + trigger, e);
                continue;
            }
            for (Condition condition : conditions) {
                if (condition instanceof ExternalCondition) {
                    ExternalCondition externalCondition = (ExternalCondition) condition;
                    if (ALERTER_ID.equals(externalCondition.getAlerterId())) {
                        log.debugf("Found Prometheus ExternalCondition %s", externalCondition);
                        activeConditions.add(externalCondition);
                        if (expressionFutures.containsKey(externalCondition)) {
                            log.debugf("Skipping, already evaluating %s", externalCondition);

                        } else {
                            try {
                                // start the job. TODO: Do we need a delay for any reason?
                                log.debugf("Adding runner for %s", externalCondition);

                                Map<String, String> properties = new HashMap<>(defaultProperties);
                                if (trigger.getContext().containsKey(CONTEXT_URL)) {
                                    properties.put(URL, trigger.getContext().get(CONTEXT_URL));
                                }
                                String frequency = trigger.getContext().containsKey(CONTEXT_FREQUENCY)
                                        ? trigger.getContext().get(CONTEXT_FREQUENCY)
                                        : CONTEXT_FREQUENCY_DEFAULT;

                                ExpressionRunner runner = new ExpressionRunner(alerts, properties,
                                        externalCondition);
                                expressionFutures.put(externalCondition, expressionExecutor.scheduleAtFixedRate(
                                        runner, 0L, Long.valueOf(frequency), TimeUnit.SECONDS));
                            } catch (Exception e) {
                                log.error("Failed to schedule expression for Prometheus condition "
                                        + externalCondition, e);
                            }
                        }
                    }
                }
            }
        }

        // cancel obsolete expressions
        Set<ExternalCondition> temp = new HashSet<>();
        for (Map.Entry<ExternalCondition, ScheduledFuture<?>> me : expressionFutures.entrySet()) {
            ExternalCondition ec = me.getKey();
            if (!activeConditions.contains(ec)) {
                log.debugf("Canceling evaluation of obsolete External Prometheus Condition %s", ec);

                me.getValue().cancel(true);
                temp.add(ec);
            }
        }
        expressionFutures.keySet().removeAll(temp);
        temp.clear();

    } catch (Exception e) {
        log.error("Failed to fetch Triggers for scheduling Prometheus conditions.", e);
    }
}
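
The cancellation block above collects obsolete keys into a temporary set while iterating the map, removes them afterwards with removeAll(), and then clears the temporary set; removing entries inside the for-each loop would risk a ConcurrentModificationException. A minimal sketch of that collect-then-remove idiom (the map contents are illustrative only):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class CollectThenRemove {
    public static void main(String[] args) {
        Map<String, Integer> running = new HashMap<>();
        running.put("a", 1);
        running.put("b", 2);
        running.put("c", 3);
        Set<String> active = Set.of("a", "c");

        Set<String> obsolete = new HashSet<>();
        for (Map.Entry<String, Integer> entry : running.entrySet()) {
            if (!active.contains(entry.getKey())) {
                obsolete.add(entry.getKey()); // defer removal until iteration is done
            }
        }
        running.keySet().removeAll(obsolete);
        obsolete.clear();                     // empty the scratch set for reuse

        System.out.println(running.keySet()); // [a, c] (iteration order may vary)
    }
}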

From source file:com.npower.dm.hibernate.management.ProfileAssignmentManagementBeanImpl.java

/**
 * Adds or updates the value of the ProfileAttributeEntity owned by the
 * ProfileAssignmentEntity. If no ProfileAttributeValueEntity is found for the
 * attribute name, a single-value, text-mode value object is created. If one is
 * found, the attribute is overridden as a single-value, text-mode value object
 * and all of its multi-value items are deleted.
 *
 * Caution: Assigning null to value is permitted. This sets the value to
 * null; the AttributeValue will not be deleted!
 *
 * Caution: The order of AttributeValue entries increases automatically! The
 * AttributeValue added most recently will be at the bottom.
 * 
 * @param name
 * @param value
 * @throws DMException
 */
public void setAttributeValue(ProfileAssignment assignment, String name, String value) throws DMException {

    // update this profile config, first. make sure the profileID will generated
    // by hibernate.
    Session hsession = this.getHibernateSession();
    hsession.saveOrUpdate(assignment);

    Clob clobValue = (value != null) ? Hibernate.createClob(value) : null;

    // Check exists?
    Set<ProfileAssignmentValue> vMaps = ((ProfileAssignmentEntity) assignment).getProfileAssignValues();
    for (Iterator<ProfileAssignmentValue> i = vMaps.iterator(); i.hasNext();) {
        ProfileAssignmentValue vMap = (ProfileAssignmentValue) i.next();
        ProfileAttributeValueEntity v = (ProfileAttributeValueEntity) vMap.getProfileAttribValue();
        if (name.equals(v.getProfileAttribute().getName())) {
            v.setRawData(clobValue);
            // Set to single value mode
            v.setIsMultiValued(false);
            v.setItemDataKind(ProfileAttributeValue.ITEM_DATA_KIND_BIN);
            v.setMFormat(DDFNode.DDF_FORMAT_CHR);

            Set<?> valueItemSet = v.getProfileValueItems();
            for (Iterator<?> j = valueItemSet.iterator(); j.hasNext();) {
                hsession.delete(j.next());
            }
            valueItemSet.clear();
            return;
        }
    }

    // Create a new AttributeValue
    ProfileTemplate template = assignment.getProfileConfig().getProfileTemplate();
    ManagementBeanFactory factory = this.getManagementBeanFactory();
    ProfileAttribute attr = factory.createProfileTemplateBean().getProfileAttributeByName(template.getName(),
            name);
    if (attr == null) {
        throw new DMException(
                "Could not find attribute by name: " + name + " from the template: " + template.getName());
    }

    ProfileAttributeValue av = new ProfileAttributeValueEntity();
    av.setProfileAttribute(attr);
    av.setRawData(clobValue);
    av.setIsMultiValued(false);
    av.setItemDataKind(ProfileAttributeValue.ITEM_DATA_KIND_BIN);
    av.setMFormat(DDFNode.DDF_FORMAT_CHR);
    hsession.saveOrUpdate(av);

    // New a ProfileAssignmentValueID
    ProfileAssignmentValueID mapID = new ProfileAssignmentValueID();
    mapID.setAttributeValueId(av.getID());
    mapID.setProfileAssignmentId(assignment.getID());
    // New a ProfileAssignmentValue
    // long index = this.getProfileValueMaps().size() + 1;
    ProfileAssignmentValue map = new ProfileAssignmentValue(mapID, av, assignment);

    // Link to ProfileAssignmentEntity
    ((ProfileAssignmentEntity) assignment).getProfileAssignValues().add(map);

    hsession.saveOrUpdate(map);
}

From source file:org.syncope.core.rest.data.UserDataBinder.java

/**
 * Update user, given UserMod.
 *
 * @param user to be updated
 * @param userMod bean containing update request
 * @return updated user + propagation by resource
 * @throws SyncopeClientCompositeErrorException if anything goes wrong
 * @see PropagationByResource
 */
public PropagationByResource update(final SyncopeUser user, final UserMod userMod)
        throws SyncopeClientCompositeErrorException {

    PropagationByResource propByRes = new PropagationByResource();

    SyncopeClientCompositeErrorException scce = new SyncopeClientCompositeErrorException(
            HttpStatus.BAD_REQUEST);

    // when requesting to add user to new resources, either directly or
    // through role subscription, password is mandatory (issue 147)
    // first, let's take current resources into account
    Set<String> currentResources = user.getResourceNames();

    // password
    if (userMod.getPassword() != null) {
        int passwordHistorySize = 0;
        try {
            Policy policy = policyDAO.getGlobalPasswordPolicy();
            PasswordPolicySpec passwordPolicy = policy.getSpecification();
            passwordHistorySize = passwordPolicy.getHistoryLength();
        } catch (Throwable ignore) {
            // ignore exceptions
        }

        user.setPassword(userMod.getPassword(), getCipherAlgoritm(), passwordHistorySize);

        user.setChangePwdDate(new Date());

        propByRes.addAll(PropagationOperation.UPDATE, user.getResourceNames());
    }

    // username
    if (userMod.getUsername() != null && !userMod.getUsername().equals(user.getUsername())) {

        String oldUsername = user.getUsername();

        user.setUsername(userMod.getUsername());
        propByRes.addAll(PropagationOperation.UPDATE, user.getResourceNames());

        for (ExternalResource resource : user.getResources()) {
            for (SchemaMapping mapping : resource.getMappings()) {
                if (mapping.isAccountid() && mapping.getIntMappingType() == IntMappingType.Username) {

                    propByRes.addOldAccountId(resource.getName(), oldUsername);
                }
            }
        }
    }

    // attributes, derived attributes, virtual attributes and resources
    propByRes.merge(fill(user, userMod, AttributableUtil.USER, scce));

    // store the role ids of membership required to be added
    Set<Long> membershipToBeAddedRoleIds = new HashSet<Long>();
    for (MembershipMod membToBeAdded : userMod.getMembershipsToBeAdded()) {
        membershipToBeAddedRoleIds.add(membToBeAdded.getRole());
    }

    // memberships to be removed
    Membership membership = null;
    for (Long membershipId : userMod.getMembershipsToBeRemoved()) {
        LOG.debug("Membership to be removed: {}", membershipId);

        membership = membershipDAO.find(membershipId);
        if (membership == null) {
            LOG.debug("Invalid membership id specified to be removed: {}", membershipId);
        } else {
            for (ExternalResource resource : membership.getSyncopeRole().getResources()) {

                if (!membershipToBeAddedRoleIds.contains(membership.getSyncopeRole().getId())) {

                    propByRes.add(PropagationOperation.DELETE, resource.getName());
                }
            }

            // In order to make the removeMembership() below to work,
            // we need to be sure to take exactly the same membership
            // of the user object currently in memory (which has potentially
            // some modifications compared to the one stored in the DB
            membership = user.getMembership(membership.getSyncopeRole().getId());
            if (membershipToBeAddedRoleIds.contains(membership.getSyncopeRole().getId())) {

                Set<Long> attributeIds = new HashSet<Long>(membership.getAttributes().size());
                for (AbstractAttr attribute : membership.getAttributes()) {
                    attributeIds.add(attribute.getId());
                }
                for (Long attributeId : attributeIds) {
                    attributeDAO.delete(attributeId, MAttr.class);
                }
                attributeIds.clear();

                // remove derived attributes
                for (AbstractDerAttr derAttr : membership.getDerivedAttributes()) {

                    attributeIds.add(derAttr.getId());
                }
                for (Long derAttrId : attributeIds) {
                    derAttrDAO.delete(derAttrId, MDerAttr.class);
                }
                attributeIds.clear();

                // remove virtual attributes
                for (AbstractVirAttr virAttr : membership.getVirtualAttributes()) {

                    attributeIds.add(virAttr.getId());
                }
                for (Long virAttrId : attributeIds) {
                    virAttrDAO.delete(virAttrId, MVirAttr.class);
                }
                attributeIds.clear();
            } else {
                user.removeMembership(membership);

                membershipDAO.delete(membershipId);
            }
        }
    }

    // memberships to be added
    for (MembershipMod membershipMod : userMod.getMembershipsToBeAdded()) {
        LOG.debug("Membership to be added: role({})", membershipMod.getRole());

        SyncopeRole role = roleDAO.find(membershipMod.getRole());
        if (role == null) {
            LOG.debug("Ignoring invalid role {}", membershipMod.getRole());
        } else {
            membership = user.getMembership(role.getId());
            if (membership == null) {
                membership = new Membership();
                membership.setSyncopeRole(role);
                membership.setSyncopeUser(user);

                user.addMembership(membership);

                propByRes.addAll(PropagationOperation.UPDATE, role.getResourceNames());
            }

            propByRes.merge(fill(membership, membershipMod, AttributableUtil.MEMBERSHIP, scce));
        }
    }

    // now, let's see if there are new resource subscriptions without
    // providing password
    Set<String> updatedResources = user.getResourceNames();
    updatedResources.removeAll(currentResources);
    if (!updatedResources.isEmpty() && StringUtils.isBlank(userMod.getPassword())) {

        SyncopeClientException sce = new SyncopeClientException(
                SyncopeClientExceptionType.RequiredValuesMissing);
        sce.addElement("password cannot be empty " + "when subscribing to new resources");
        scce.addException(sce);

        throw scce;
    }

    return propByRes;
}

From source file:com.npower.dm.hibernate.management.ProfileAssignmentManagementBeanImpl.java

/**
 * Adds or updates the value of the ProfileAttributeEntity owned by the
 * ProfileAssignmentEntity. If no ProfileAttributeValueEntity is found for the
 * attribute name, a single-value, binary-mode value object is created. If one
 * is found, the attribute is overridden as a single-value, binary-mode value
 * object and all of its multi-value items are deleted.
 *
 * Caution: Assigning null to value is permitted. This sets the value to
 * null; the AttributeValue will not be deleted!
 *
 * Caution: The order of AttributeValue entries increases automatically! The
 * AttributeValue added most recently will be at the bottom.
 * 
 * @param name
 * @param value
 * @throws DMException
 */
public void setAttributeValue(ProfileAssignment assignment, String name, InputStream value)
        throws DMException, IOException {

    // update this profile config, first. make sure the profileID will generated
    // by hibernate.
    Session hsession = this.getHibernateSession();
    hsession.saveOrUpdate(assignment);

    Blob blobValue = null;
    try {
        blobValue = (value != null) ? Hibernate.createBlob(value) : null;
    } catch (IOException e) {
        throw e;
    }

    // Check exists?
    Set<ProfileAssignmentValue> vMaps = ((ProfileAssignmentEntity) assignment).getProfileAssignValues();
    for (Iterator<ProfileAssignmentValue> i = vMaps.iterator(); i.hasNext();) {
        ProfileAssignmentValue vMap = (ProfileAssignmentValue) i.next();
        ProfileAttributeValueEntity v = (ProfileAttributeValueEntity) vMap.getProfileAttribValue();
        if (name.equals(v.getProfileAttribute().getName())) {
            v.setBinaryData(blobValue);
            // Set to single value mode
            v.setIsMultiValued(false);
            v.setItemDataKind(ProfileAttributeValue.ITEM_DATA_KIND_TEXT);
            v.setMFormat(DDFNode.DDF_FORMAT_BIN);
            // Delete multiple value
            Set<?> valueItemSet = v.getProfileValueItems();
            for (Iterator<?> j = valueItemSet.iterator(); j.hasNext();) {
                hsession.delete(j.next());
            }
            valueItemSet.clear();
            return;
        }
    }

    // Create a new AttributeValue
    ProfileTemplate template = assignment.getProfileConfig().getProfileTemplate();
    ManagementBeanFactory factory = this.getManagementBeanFactory();
    ProfileAttribute attr = factory.createProfileTemplateBean().getProfileAttributeByName(template.getName(),
            name);
    if (attr == null) {
        throw new DMException(
                "Could not find attribute by name: " + name + " from the template: " + template.getName());
    }

    ProfileAttributeValue av = new ProfileAttributeValueEntity();
    av.setProfileAttribute(attr);
    av.setBinaryData(blobValue);
    av.setIsMultiValued(false);
    av.setItemDataKind(ProfileAttributeValue.ITEM_DATA_KIND_TEXT);
    av.setMFormat(DDFNode.DDF_FORMAT_BIN);
    hsession.saveOrUpdate(av);

    // New a ProfileAssignmentValueID
    ProfileAssignmentValueID mapID = new ProfileAssignmentValueID();
    mapID.setAttributeValueId(av.getID());
    mapID.setProfileAssignmentId(assignment.getID());
    // New a ProfileAssignmentValue
    // long index = this.getProfileValueMaps().size() + 1;
    ProfileAssignmentValue map = new ProfileAssignmentValue(mapID, av, assignment);

    // Link to ProfileAssignmentEntity
    ((ProfileAssignmentEntity) assignment).getProfileAssignValues().add(map);

    hsession.saveOrUpdate(map);
}

From source file:org.aksw.resparql.ServerMethods.java

public Model getAreaStatistics(RectangularShape shape, List<String> classUris) throws Exception {
    Model result = ModelFactory.createDefaultModel();

    classUris = new ArrayList<String>();
    classUris.add("http://linkedgeodata.org/ontology/Amenity");
    classUris.add("http://linkedgeodata.org/ontology/Tourism");
    classUris.add("http://linkedgeodata.org/ontology/Leisure");
    classUris.add("http://linkedgeodata.org/ontology/Shop");

    Map<String, Set<Tag>> classToTags = new HashMap<String, Set<Tag>>();

    // Reverse map all classUris
    OntologyDAO dao = lgdRDFDAO.getOntologyDAO();
    for (String uri : classUris) {
        Set<Tag> tags = MultiMaps.addKey(classToTags, uri);

        MultiMap<Tag, IOneOneTagMapper> rev = dao.reverseMapResourceObject(RDF.type.getURI(), uri);

        for (Tag tag : rev.keySet()) {
            if (tag.getKey() == null) {
                tags.clear();
                tags.add(tag);
            } else if (tag.getValue() == null) {
                for (Iterator<Tag> it = tags.iterator(); it.hasNext();) {
                    if (it.next().getKey().equals(tag.getKey())) {
                        it.remove();
                    }
                }
                tags.add(tag);
            } else {
                boolean isSubsumed = false;
                for (Tag tmp : tags) {
                    if (tmp.getKey() == null || tmp.getValue() == null) {
                        isSubsumed = true;
                        break;
                    }
                }

                if (!isSubsumed) {
                    tags.add(tag);
                }
            }
        }
    }

    Set<String> keys = new HashSet<String>();
    //Set<Tag> tags = new HashSet<Tag>();
    for (Set<Tag> tags : classToTags.values()) {
        for (Tag tag : tags) {
            if (tag.getValue() == null) {
                keys.add(tag.getKey());
            }
        }
    }

    Connection conn = lgdRDFDAO.getConnection();
    String sql = LGDQueries.buildAreaStatsQueryExact(shape, keys);

    java.sql.ResultSet rs = conn.createStatement().executeQuery(sql);
    Map<String, Long> keyToCount = new HashMap<String, Long>();
    while (rs.next()) {
        keyToCount.put(rs.getString("k"), rs.getLong("c"));
    }
    //for(

    Map<String, Long> uriToCount = new HashMap<String, Long>();
    // Cross check the tags with the requested classes

    return result;
}
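
In getAreaStatistics(), tags.clear() implements subsumption: a tag whose key is null matches everything, so the more specific tags collected so far are discarded before it is added. A tiny sketch of the same "wildcard replaces specifics" idiom (the pattern strings are made up):

import java.util.HashSet;
import java.util.Set;

class WildcardSubsumption {
    public static void main(String[] args) {
        Set<String> patterns = new HashSet<>();
        String[] incoming = { "amenity=pub", "shop=bakery", "*" };
        for (String pattern : incoming) {
            if ("*".equals(pattern)) {
                patterns.clear(); // the wildcard subsumes every specific pattern
            }
            patterns.add(pattern);
        }
        System.out.println(patterns); // [*]
    }
}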

From source file:ch.mlutz.plugins.t4e.tapestry.TapestryModule.java

/**
 * Removes all Tapestry elements from this module; the removal cascades to
 * the Tapestry index through the registered change listener.
 */
public void clear() {
    List<Set<? extends TapestryElement>> listOfElementSets = getListOfElementSets();
    for (Set<? extends TapestryElement> set : listOfElementSets) {
        for (TapestryElement element : set) {
            this.changeListener.elementRemoved(this, element);
        }
        set.clear();
    }
}
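
Here clear() runs only after every element has been reported to the change listener; removing elements one by one inside the inner loop would mutate the set while it is being iterated. A compact sketch of the same notify-then-clear pattern (the Consumer callback stands in for the listener and is not part of the source above):

import java.util.HashSet;
import java.util.Set;
import java.util.function.Consumer;

class ClearWithNotification {
    private final Set<String> elements = new HashSet<>();

    void clear(Consumer<String> onRemoved) {
        for (String element : elements) {
            onRemoved.accept(element); // notify first, while the element is still present
        }
        elements.clear();              // then empty the set in one step
    }
}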