Example usage for java.util.Collection.remove

Introduction

On this page you can find example usages of java.util.Collection.remove.

Prototype

boolean remove(Object o);

Document

Removes a single instance of the specified element from this collection, if it is present (optional operation).
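
The contract is easy to verify against the standard collections: remove matches elements via equals, deletes at most one instance per call, returns whether anything was removed, and, being an optional operation, may throw UnsupportedOperationException on unmodifiable collections. A minimal, self-contained sketch (assuming Java 9+ for List.of):

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

public class CollectionRemoveDemo {
    public static void main(String[] args) {
        // remove(Object) deletes at most one element that is equal to the argument
        Collection<String> names = new ArrayList<>(List.of("ann", "bob", "ann"));
        System.out.println(names.remove("ann"));   // true: one instance removed
        System.out.println(names);                 // [bob, ann] -- the second "ann" remains
        System.out.println(names.remove("carol")); // false: nothing matched

        // the operation is optional: unmodifiable collections throw
        try {
            List.of("x").remove("x");
        } catch (UnsupportedOperationException e) {
            System.out.println("unmodifiable collections reject remove()");
        }
    }
}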

Usage

From source file: org.fenixedu.academic.domain.student.Registration.java

private void setStudentCandidacyInformation(final StudentCandidacy studentCandidacy) {
    setStudentCandidacy(studentCandidacy);
    if (studentCandidacy != null) {
        super.setEntryPhase(studentCandidacy.getEntryPhase());
        super.setIngressionType(studentCandidacy.getIngressionType());

        if (studentCandidacy.getIngressionType().isReIngression()) {
            final Degree sourceDegree = studentCandidacy.getDegreeCurricularPlan().getEquivalencePlan()
                    .getSourceDegree();
            Registration registration = getStudent().readRegistrationByDegree(sourceDegree);
            if (registration == null) {
                final Collection<Registration> registrations = getStudent()
                        .getRegistrationsMatchingDegreeType(DegreeType::isPreBolonhaDegree);
                registrations.remove(this);
                registration = registrations.size() == 1 ? registrations.iterator().next() : null;
            }

            setSourceRegistration(registration);
        }
    }
}
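
The registrations.remove(this) call above is a small idiom: drop the current object from a candidate collection and accept the remainder only when it is unambiguous. A generic sketch of the same idiom (the helper below is hypothetical, not part of FenixEdu; it copies the collection defensively, whereas the example above mutates the freshly built collection directly):

import java.util.ArrayList;
import java.util.Collection;

class UniqueOther {
    /** Returns the single element of candidates other than self, or null when ambiguous. */
    static <T> T uniqueOther(Collection<? extends T> candidates, T self) {
        Collection<T> copy = new ArrayList<>(candidates); // defensive copy: leave the caller's collection alone
        copy.remove(self);                                // remove(Object) is a no-op (returns false) if self is absent
        return copy.size() == 1 ? copy.iterator().next() : null;
    }
}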

From source file: com.nextep.designer.sqlgen.oracle.impl.OracleCapturer.java

/**
 * Retrieves all user packages defined in the provided database connection. A collection of all
 * versionable packages will be returned. An empty collection will be returned if no package
 * exists in the provided database connection.
 *
 * @param conn database connection from which packages should be retrieved
 * @param monitor progress monitor used to report the capture progress
 * @return a collection of all existing packages
 * @throws SQLException if any error occurs while communicating with the database
 */
private Collection<IVersionable<IPackage>> getPackages(Connection conn, IProgressMonitor monitor) {
    Collection<IVersionable<IPackage>> packages = new ArrayList<IVersionable<IPackage>>();
    monitor.subTask("Retrieving packages...");
    Statement stmt = null;
    ResultSet rset = null;
    long start = 0;

    try {
        stmt = conn.createStatement();

        try {
            if (LOGGER.isDebugEnabled())
                start = System.currentTimeMillis();
            rset = stmt.executeQuery("SELECT name, type, text " //$NON-NLS-1$
                    + "FROM user_source " //$NON-NLS-1$
                    + "WHERE type IN ('PACKAGE', 'PACKAGE BODY') " //$NON-NLS-1$
                    + "ORDER BY name, type, line"); //$NON-NLS-1$
            if (LOGGER.isDebugEnabled())
                LOGGER.debug("[Packages][PLSQL] query time: " + (System.currentTimeMillis() - start) + "ms"); //$NON-NLS-2$

            if (LOGGER.isDebugEnabled())
                start = System.currentTimeMillis();
            IPackage currentPackage = null;
            boolean isWrapped = false;
            boolean insideBody = false;
            // final String NEWLINE= System.getProperty("line.separator");
            while (rset.next()) {
                monitor.worked(1);
                final String name = rset.getString(1);
                final String type = rset.getString(2);
                final String text = rset.getString(3);

                if (currentPackage == null || !name.equals(currentPackage.getName())) {
                    if (currentPackage != null) {
                        if (currentPackage.getSpecSourceCode() == null) {
                            LOGGER.warn("Skipping package '" + currentPackage.getName() + "': Spec not found.");
                            packages.remove(currentPackage);
                        }
                        if (currentPackage.getBodySourceCode() == null) {
                            currentPackage.setBodySourceCode(""); //$NON-NLS-1$
                        }
                    }
                    insideBody = false;
                    if (!isWrapped) {
                        // postPackageCleanUp(currentPackage);
                    }
                    currentPackage = VersionableFactory.createVersionable(IPackage.class).getVersionnedObject()
                            .getModel();
                    currentPackage.setName(name);
                    isWrapped = text.contains("wrapped"); //$NON-NLS-1$
                    packages.add(VersionHelper.getVersionable(currentPackage));
                }
                if ("PACKAGE".equals(type)) { //$NON-NLS-1$
                    // text = removeChar(text,'\n');
                    // text = removeChar(text,'\r');
                    currentPackage.setSpecSourceCode(currentPackage.getSpecSourceCode() == null ? text
                            : (currentPackage.getSpecSourceCode() + text));
                } else if ("PACKAGE BODY".equals(type)) { //$NON-NLS-1$
                    if (!insideBody) {
                        isWrapped = text.contains("wrapped"); //$NON-NLS-1$
                        insideBody = true;
                    }
                    if (!isWrapped) {
                        // text = removeChar(text,'\n');
                        // text = removeChar(text,'\r');
                        currentPackage.setBodySourceCode(currentPackage.getBodySourceCode() == null ? text
                                : (currentPackage.getBodySourceCode() + text));
                    } else {
                        currentPackage.setBodySourceCode(currentPackage.getBodySourceCode() == null ? text
                                : (currentPackage.getBodySourceCode() + text));
                    }
                }
            }
            if (LOGGER.isDebugEnabled())
                LOGGER.debug("[Packages][PLSQL] fetching time: " + (System.currentTimeMillis() - start) + "ms"); //$NON-NLS-2$

            if (currentPackage != null) {
                if (currentPackage.getBodySourceCode() == null || currentPackage.getSpecSourceCode() == null) {
                    LOGGER.warn("Skipping package '" + currentPackage.getName() + "': Spec or body not found.");
                    packages.remove(currentPackage);
                }
            }
            if (currentPackage != null && !isWrapped) {
                // postPackageCleanUp(currentPackage);
            }
        } finally {
            CaptureHelper.safeClose(rset, null);
        }

        // FIXME [BGA] What is the purpose of this block of code since
        // retrieved Java procedure
        // are not put in the returned collection of this method?

        // Retrieving java source
        try {
            if (LOGGER.isDebugEnabled())
                start = System.currentTimeMillis();
            rset = stmt.executeQuery("SELECT name, text " //$NON-NLS-1$
                    + "FROM user_source " //$NON-NLS-1$
                    + "WHERE type = 'JAVA SOURCE' " //$NON-NLS-1$
                    + "ORDER BY name, line"); //$NON-NLS-1$
            if (LOGGER.isDebugEnabled())
                LOGGER.debug("[Packages][Java] query time: " //$NON-NLS-1$
                        + (System.currentTimeMillis() - start) + "ms"); //$NON-NLS-1$

            if (LOGGER.isDebugEnabled())
                start = System.currentTimeMillis();
            IProcedure currentProc = null;
            while (rset.next()) {
                monitor.worked(1);
                final String name = rset.getString(1);
                final String line = rset.getString(2);

                if (currentProc == null || !name.equals(currentProc.getName())) {
                    IVersionable<IProcedure> v = VersionableFactory.createVersionable(IProcedure.class);
                    currentProc = v.getVersionnedObject().getModel();
                    currentProc.setName(name);
                    currentProc.setLanguageType(LanguageType.JAVA);
                    currentProc.setSQLSource(line);
                } else {
                    currentProc.setSQLSource(currentProc.getSQLSource() + line);
                }
            }
            if (LOGGER.isDebugEnabled())
                LOGGER.debug("[Packages][Java] fetching time: " + (System.currentTimeMillis() - start) + "ms"); //$NON-NLS-2$
        } finally {
            CaptureHelper.safeClose(rset, null);
        }
    } catch (SQLException e) {
        LOGGER.warn(MessageFormat.format(SQLGenMessages.getString("capturer.error.genericCapturerError"), //$NON-NLS-1$
                DBVendor.ORACLE.toString()) + e.getMessage(), e);
    } finally {
        CaptureHelper.safeClose(null, stmt);
    }

    return packages;
}

From source file: org.cloudata.core.master.CloudataMaster.java

private boolean assignTablet(TabletInfo tabletInfo, Collection<TabletServerInfo> tabletServers)
        throws IOException {
    synchronized (tabletInfoMonitor) {
        if (assigningTablets.containsKey(tabletInfo)) {
            LOG.info("Already requested assigning(" + tabletInfo + ")");
            return true;
        }
        unassignedTablets.remove(tabletInfo.getTabletName());
        assigningTablets.put(tabletInfo.getTabletName(), tabletInfo);
    }

    int retry = 0;
    // select a TabletServer and try to assign the tablet
    while (retry < 5) {
        int runningSize = tabletServers.size();
        if (runningSize == 0) {
            LOG.debug("no active tablet server. can't assign tablet: " + tabletInfo);
            break;
        }

        TabletServerInfo tabletServerInfo = null;
        try {
            long startTime = System.currentTimeMillis();
            TabletManagerProtocol tabletServer = null;
            while (true) {
                tabletServerInfo = selectTabletServerForAssignment(tabletServers);

                if (tabletServerInfo != null) {
                    try {
                        tabletServer = connectTabletServer(tabletServerInfo);
                        break;
                    } catch (IOException e) {
                        tabletServers.remove(tabletServerInfo);
                    }
                }
                if (System.currentTimeMillis() - startTime > (10 * 1000)) {
                    break;
                }
            }
            if (tabletServerInfo == null) {
                LOG.error("Can't find proper tablet server(live TabletServer=" + runningSize);
                retry++;
                continue;
            }

            try {
                tabletServer.assignTablet(tabletInfo);
            } catch (IOException e) {
                // the assign request may have reached the TabletServer despite the error
                //FIXME verify whether the tablet was actually assigned to this TabletServer
                if (!liveTabletServers.containsKey(tabletServerInfo.getHostName())) {
                    LOG.error("Can't assign to " + tabletServerInfo.getHostName());
                    tabletServers.remove(tabletServerInfo);
                    retry++;
                    continue;
                }
                int errorRetry = 0;
                while (true) {
                    try {
                        TabletInfo assignedTabletInfo = tabletServer.getTabletInfo(tabletInfo.getTabletName());
                        if (assignedTabletInfo != null) {
                            // already assigned
                            break;
                        }
                    } catch (IOException err) {
                        LOG.warn("Error checking Tablet already assigned:" + tabletInfo + ","
                                + tabletServerInfo);
                        Thread.sleep(1000);
                        errorRetry++;
                        if (errorRetry >= 10) {
                            throw new IOException(e);
                        }
                    }
                }
            }

            tabletInfo.setAssignedHostName(tabletServerInfo.getHostName());

            tabletServerInfo.addNumOfTablets();

            LOG.info("assignTablet: tabletName=" + tabletInfo.getTabletName() + ", assignedHost="
                    + tabletServerInfo.getHostName());
            return true;
        } catch (Exception e) {
            LOG.warn("error while assignment. but retry:" + e.getMessage());
            if (tabletServerInfo != null) {
                LOG.warn("Exception in assigning tablet : " + tabletInfo.getTabletName() + " to host : "
                        + tabletServerInfo.getHostName() + ". Retry count : " + (retry + 1), e);
            } else {
                LOG.warn("Exception in assigning tablet : " + tabletInfo.getTabletName()
                        + " to host null. Retry count : " + (retry + 1), e);
            }
            retry++;
            try {
                Thread.sleep(1 * 1000);
            } catch (InterruptedException e1) {
            }
        }
    }

    // if assignment fails
    synchronized (tabletInfoMonitor) {
        assigningTablets.remove(tabletInfo.getTabletName());
        unassignedTablets.put(tabletInfo.getTabletName(), tabletInfo);
    }
    LOG.debug("Assignment fail:" + tabletInfo);
    return false;
}
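
Note that tabletServers.remove(tabletServerInfo) mutates the collection supplied by the caller, so a server that fails to connect is never offered again within this assignment attempt. A condensed, hypothetical sketch of that prune-on-failure pattern (Server and tryConnect are illustrative names, not Cloudata APIs):

import java.util.Collection;

class RetryWithPruning {
    interface Server { boolean tryConnect(); }

    /** Tries servers from the pool until one connects, pruning each failure. */
    static Server pickServer(Collection<Server> pool) {
        while (!pool.isEmpty()) {
            Server candidate = pool.iterator().next();
            if (candidate.tryConnect()) {
                return candidate;   // successful servers stay in the pool
            }
            pool.remove(candidate); // Collection.remove: failed server is skipped on retry
        }
        return null;                // pool exhausted
    }
}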

From source file: org.biomart.configurator.controller.MartController.java

/**
 * Given a set of tables, produce the minimal set of datasets which include all the specified tables. Tables can be
 * included in the same dataset if they are linked by 1:M relations (1:M, 1:M in a chain), or if the table is the
 * last in the chain and is linked to the previous table by a pair of 1:M and M:1 relations via a third table,
 * simulating an M:M relation.
 * <p>
 * If the chains of tables fork, then one dataset is generated for each branch of the fork.
 * <p>
 * Every suggested dataset is synchronised before being returned.
 * <p>
 * Datasets will be named after their central tables. If a dataset with that name already exists, a '_' and sequence
 * number will be appended to make the new dataset name unique.
 * <p>
 * 
 * @param includeTables
 *            the tables that must appear in the final set of datasets.
 * @return the collection of datasets generated.
 * @throws SQLException
 *             if there is any problem talking to the source database whilst generating the dataset.
 * @throws DataModelException
 *             if synchronisation fails.
 */
private Collection<Mart> suggestMarts(final MartRegistry registry, final TargetSchema schema,
        final Collection<SourceTable> includeTables) throws SQLException, DataModelException {
    Log.debug("Suggesting datasets for " + includeTables);

    // The root tables are all those which do not have a M:1 relation
    // to another one of the initial set of tables. This means that
    // extra datasets will be created for each table at the end of
    // 1:M:1 relation, so that any further tables past it will still
    // be included.
    Log.debug("Finding root tables");
    final Collection<SourceTable> rootTables = new HashSet<SourceTable>(includeTables);
    for (final Iterator<SourceTable> i = includeTables.iterator(); i.hasNext();) {
        final SourceTable candidate = i.next();
        for (final Iterator<Relation> j = candidate.getRelations().iterator(); j.hasNext();) {
            final Relation rel = j.next();
            if (rel.getStatus().equals(ComponentStatus.INFERRED_INCORRECT))
                continue;
            if (!rel.isOneToMany())
                continue;
            if (!rel.getManyKey().getTable().equals(candidate))
                continue;
            if (includeTables.contains(rel.getFirstKey().getTable()))
                rootTables.remove(candidate);
        }
    }
    // We construct one dataset per root table.
    final List<Mart> suggestedMarts = new ArrayList<Mart>();
    for (final Iterator<SourceTable> i = rootTables.iterator(); i.hasNext();) {
        final SourceTable rootTable = i.next();
        Log.debug("Constructing dataset for root table " + rootTable);
        Mart tmpMart = null;
        /*
         * if(reuseMart) { tmpMart = registry.getMartByName(rootTable.getName()); } else
         */
        tmpMart = new Mart(registry, rootTable.getName(), rootTable);
        tmpMart.setHasSource(true);
        tmpMart.setTargetSchema(schema);
        // Process it.
        final Collection<SourceTable> tablesIncluded = new HashSet<SourceTable>();
        tablesIncluded.add(rootTable);
        Log.debug("Attempting to find subclass marts");
        suggestedMarts
                .addAll(this.continueSubclassing(registry, includeTables, tablesIncluded, tmpMart, rootTable));
    }

    // Synchronise them all.
    Log.debug("Synchronising constructed marts");
    for (Mart ds : suggestedMarts) {
        ds.setTargetSchema(schema);
        this.synchronise(ds);
    }

    // Do any of the resulting datasets contain all the tables
    // exactly with subclass relations between each?
    // If so, just use that one dataset and forget the rest.
    Log.debug("Finding perfect candidate");
    Mart perfectDS = null;
    for (final Iterator<Mart> i = suggestedMarts.iterator(); i.hasNext() && perfectDS == null;) {
        final Mart candidate = i.next();

        // A candidate is a perfect match if the set of tables
        // covered by the subclass relations is the same as the
        // original set of tables requested.
        final Collection<Table> scTables = new HashSet<Table>();
        for (final Iterator<Relation> j = candidate.getRelations().iterator(); j.hasNext();) {
            final Relation r = j.next();
            if (!r.isSubclassRelation(candidate.getName()))
                continue;
            scTables.add(r.getFirstKey().getTable());
            scTables.add(r.getSecondKey().getTable());
        }
        // Finally perform the check to see if we have them all.
        if (scTables.containsAll(includeTables))
            perfectDS = candidate;
    }
    if (perfectDS != null) {
        Log.debug("Perfect candidate found - dropping others");
        // Drop the others.
        for (final Iterator<Mart> i = suggestedMarts.iterator(); i.hasNext();) {
            final Mart candidate = i.next();
            if (!candidate.equals(perfectDS)) {
                registry.removeMart(candidate);
                i.remove();
            }
        }
        // Rename it to lose any extension it may have gained.
        String newName = perfectDS.getCentralTable().getName();
        String uniqueName = registry.getNextMartName(newName);
        perfectDS.setName(uniqueName);
    } else
        Log.debug("No perfect candidate found - retaining all");

    // Return the final set of suggested datasets.
    return suggestedMarts;
}

From source file: org.sakaiproject.unboundid.UnboundidDirectoryProvider.java

/**
 * Similar to iterating over <code>users</code> passing
 * each element to {@link #getUser(UserEdit)}, removing the
 * {@link org.sakaiproject.user.api.UserEdit} if that method 
 * returns <code>false</code>. 
 *
 * <p>Adds search retry capability if any one lookup fails 
 * with a directory error. Empties <code>users</code> and 
 * returns if a retry exits exceptionally
 * <p>
 */
public void getUsers(Collection<UserEdit> users) {
    log.debug("getUsers(): [Collection size = {}]", users.size());

    boolean abortiveSearch = false;
    int maxQuerySize = getMaxObjectsToQueryFor();
    UserEdit userEdit = null;

    HashMap<String, UserEdit> usersToSearchInLDAP = new HashMap<String, UserEdit>();
    List<UserEdit> usersToRemove = new ArrayList<UserEdit>();
    try {
        int cnt = 0;
        for (Iterator<UserEdit> userEdits = users.iterator(); userEdits.hasNext();) {
            userEdit = (UserEdit) userEdits.next();
            String eid = userEdit.getEid();

            if (!(isSearchableEid(eid))) {
                userEdits.remove();
                //proceed ahead with this (perhaps the final) iteration
                //usersToSearchInLDAP needs to be processed unless empty
            } else {
                usersToSearchInLDAP.put(eid, userEdit);
                cnt++;
            }

            // We need to make sure this query isn't larger than maxQuerySize
            if ((!userEdits.hasNext() || cnt == maxQuerySize) && !usersToSearchInLDAP.isEmpty()) {
                String filter = ldapAttributeMapper.getManyUsersInOneSearch(usersToSearchInLDAP.keySet());
                List<LdapUserData> ldapUsers = searchDirectory(filter, null, null, null, maxQuerySize);

                for (LdapUserData ldapUserData : ldapUsers) {
                    String ldapEid = ldapUserData.getEid();

                    if (StringUtils.isEmpty(ldapEid)) {
                        continue;
                    }
                    ldapEid = ldapEid.toLowerCase();

                    UserEdit ue = usersToSearchInLDAP.get(ldapEid);
                    mapUserDataOntoUserEdit(ldapUserData, ue);
                    usersToSearchInLDAP.remove(ldapEid);
                }

                // see if there are any users that we could not find in the LDAP query
                for (Map.Entry<String, UserEdit> entry : usersToSearchInLDAP.entrySet()) {
                    usersToRemove.add(entry.getValue());
                }

                // clear the HashMap and reset the counter
                usersToSearchInLDAP.clear();
                cnt = 0;
            }
        }

        // Finally clean up the original collection and remove any users we could not find
        for (UserEdit userRemove : usersToRemove) {
            log.debug("Unboundid getUsers could not find user: {}", userRemove.getEid());
            users.remove(userRemove);
        }

    } catch (LDAPException e) {
        abortiveSearch = true;
        throw new RuntimeException("getUsers(): LDAPException during search [eid = "
                + (userEdit == null ? null : userEdit.getEid()) + "][result code = " + e.errorCodeToString()
                + "][error message = " + e.getLDAPErrorMessage() + "]", e);
    } catch (Exception e) {
        abortiveSearch = true;
        throw new RuntimeException("getUsers(): RuntimeException during search eid = "
                + (userEdit == null ? null : userEdit.getEid()) + "]", e);
    } finally {
        // no sense in returning a partially complete search result
        if (abortiveSearch) {
            log.debug("getUsers(): abortive search, clearing received users collection");
            users.clear();
        }
    }

}
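
This method is deliberate about when it removes: inside the loop it calls Iterator.remove(), and it defers the Collection.remove(Object) calls on users until iteration has finished, because removing through the collection while iterating it would typically fail fast. A minimal sketch of the distinction (assuming Java 9+ for List.of):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class RemoveDuringIteration {
    public static void main(String[] args) {
        List<String> users = new ArrayList<>(List.of("a", "b", "c"));

        // Safe: Iterator.remove() keeps the live iterator consistent.
        for (Iterator<String> it = users.iterator(); it.hasNext();) {
            if ("b".equals(it.next())) {
                it.remove();
            }
        }
        System.out.println(users); // [a, c]

        // Unsafe: Collection.remove(Object) during a for-each over the same
        // collection typically throws ConcurrentModificationException:
        // for (String u : users) { users.remove(u); }
    }
}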

From source file: org.zanata.service.impl.TranslationServiceImpl.java

private Boolean saveBatch(final Set<String> extensions, final List<String> warnings, final HLocale locale,
        HDocument document, final MergeType mergeType, final Collection<Long> removedTextFlowTargetIds,
        final Optional<AsyncTaskHandle> handleOp, final Long projectIterationId,
        final List<TextFlowTarget> batch, final boolean assignCreditToUploader,
        final TranslationSourceType translationSourceType) {
    // we need to call clear at the beginning because text flow target
    // history relies on the after-commit callback.
    textFlowTargetDAO.clear();
    document = entityManager.find(HDocument.class, document.getId());
    boolean changed = false;
    // we need a fresh object in this session,
    // so that it can lazily load associated objects
    HProjectIteration iteration = projectIterationDAO.findById(projectIterationId);
    Map<String, HTextFlow> resIdToTextFlowMap = textFlowDAO.getByDocumentAndResIds(document,
            Lists.transform(batch, new Function<TextFlowTarget, String>() {

                @Override
                public String apply(TextFlowTarget input) {
                    return input.getResId();
                }
            }));
    final int numPlurals = resourceUtils.getNumPlurals(document, locale);
    List<TextFlowTargetStateChange> targetStates = Lists.newArrayList();
    Map<ContentState, Long> contentStateDeltas = Maps.newHashMap();
    for (TextFlowTarget incomingTarget : batch) {
        String resId = incomingTarget.getResId();
        String sourceHash = incomingTarget.getSourceHash();
        HTextFlow textFlow = resIdToTextFlowMap.get(resId);
        if (textFlow == null) {
            // return warning for unknown resId to caller
            String warning = "Could not find TextFlow for TextFlowTarget " + resId + " with contents: "
                    + incomingTarget.getContents();
            warnings.add(warning);
            log.warn("skipping TextFlowTarget with unknown resId: {}", resId);
        } else if (sourceHash != null && !sourceHash.equals(textFlow.getContentHash())) {
            String warning = MessageFormat.format(
                    "TextFlowTarget {0} may be obsolete; associated source hash: {1}; expected hash is {2} for source: {3}",
                    resId, sourceHash, textFlow.getContentHash(), textFlow.getContents());
            warnings.add(warning);
            log.warn("skipping TextFlowTarget {} with unknown sourceHash: {}", resId, sourceHash);
        } else {
            String validationMessage = validateTranslations(incomingTarget.getState(), iteration,
                    incomingTarget.getResId(), textFlow.getContents(), incomingTarget.getContents());
            if (!StringUtils.isEmpty(validationMessage)) {
                warnings.add(validationMessage);
                log.warn(validationMessage);
                continue;
            }
            int nPlurals = textFlow.isPlural() ? numPlurals : 1;
            // we have eagerly loaded all targets upfront
            HTextFlowTarget hTarget = textFlow.getTargets().get(locale.getId());
            ContentState oldState = ContentState.New;
            if (hTarget != null) {
                oldState = hTarget.getState();
                if (mergeType == MergeType.IMPORT) {
                    removedTextFlowTargetIds.remove(hTarget.getId());
                }
            }
            TranslationMergeServiceFactory.MergeContext mergeContext = new TranslationMergeServiceFactory.MergeContext(
                    mergeType, textFlow, locale, hTarget, nPlurals);
            TranslationMergeService mergeService = translationMergeServiceFactory.getMergeService(mergeContext);
            boolean targetChanged = mergeService.merge(incomingTarget, hTarget, extensions);
            if (hTarget == null) {
                // in case hTarget was null, we need to
                // retrieve it after merge
                hTarget = textFlow.getTargets().get(locale.getId());
            }
            targetChanged |= adjustContentsAndState(hTarget, nPlurals, warnings);
            // update translation information if applicable
            if (targetChanged) {
                hTarget.setVersionNum(hTarget.getVersionNum() + 1);
                changed = true;
                if (assignCreditToUploader) {
                    HPerson hPerson = authenticatedAccount.getPerson();
                    hTarget.setTranslator(hPerson);
                    hTarget.setLastModifiedBy(hPerson);
                } else {
                    hTarget.setTranslator(null);
                    hTarget.setLastModifiedBy(authenticatedAccount.getPerson());
                }
                hTarget.setSourceType(translationSourceType);
                hTarget.setCopiedEntityId(null);
                textFlowTargetDAO.makePersistent(hTarget);
                aggregateChanges(textFlow, hTarget, oldState, targetStates, contentStateDeltas);
            }
        }
        if (handleOp.isPresent()) {
            handleOp.get().increaseProgress(1);
        }
    }
    if (!targetStates.isEmpty()) {
        Long actorId = assignCreditToUploader ? authenticatedAccount.getPerson().getId() : null;
        DocumentLocaleKey documentLocaleKey = new DocumentLocaleKey(document.getId(), locale.getLocaleId());
        TextFlowTargetStateEvent tftUpdatedEvent = new TextFlowTargetStateEvent(documentLocaleKey,
                projectIterationId, actorId, ImmutableList.copyOf(targetStates));
        textFlowTargetStateEvent.fire(tftUpdatedEvent);
        DocStatsEvent docEvent = new DocStatsEvent(documentLocaleKey, projectIterationId, contentStateDeltas,
                Iterables.getLast(tftUpdatedEvent.getStates()).getTextFlowTargetId());
        docStatsEvent.fire(docEvent);
    }
    textFlowTargetDAO.flush();
    return changed;
}
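
One detail worth noting in removedTextFlowTargetIds.remove(hTarget.getId()): since the parameter is declared as Collection&lt;Long&gt;, the call can only resolve to Collection.remove(Object). With numeric element types and a List-typed reference there is a well-known overload pitfall, where an int argument selects List.remove(int index) instead of removal by value. A small sketch:

import java.util.ArrayList;
import java.util.List;

public class RemoveOverloadPitfall {
    public static void main(String[] args) {
        List<Integer> ids = new ArrayList<>(List.of(10, 20, 30));

        ids.remove(1);                   // List.remove(int index): drops the element at index 1
        System.out.println(ids);         // [10, 30]

        ids.remove(Integer.valueOf(30)); // Collection.remove(Object): drops the value 30
        System.out.println(ids);         // [10]
    }
}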

From source file: net.sourceforge.fenixedu.domain.student.Registration.java

private void setStudentCandidacyInformation(final StudentCandidacy studentCandidacy) {
    setStudentCandidacy(studentCandidacy);
    if (studentCandidacy != null) {
        super.setEntryPhase(studentCandidacy.getEntryPhase());
        super.setIngression(studentCandidacy.getIngression());

        if (studentCandidacy.getIngression() == Ingression.RI) {
            final Degree sourceDegree = studentCandidacy.getDegreeCurricularPlan().getEquivalencePlan()
                    .getSourceDegree();
            Registration registration = getStudent().readRegistrationByDegree(sourceDegree);
            if (registration == null) {
                final Collection<Registration> registrations = getStudent()
                        .getRegistrationsByDegreeType(DegreeType.DEGREE);
                registrations.remove(this);
                registration = registrations.size() == 1 ? registrations.iterator().next() : null;
            }

            setSourceRegistration(registration);
        }
    }
}

From source file: org.apache.hadoop.dfs.FSNamesystem.java

/**
 * Modify (block-->datanode) map.  Possibly generate 
 * replication tasks, if the removed block is still valid.
 */
synchronized void removeStoredBlock(Block block, DatanodeDescriptor node) {
    NameNode.stateChangeLog.debug("BLOCK* NameSystem.removeStoredBlock: " + block + " from " + node.getName());
    if (!blocksMap.removeNode(block, node)) {
        NameNode.stateChangeLog.debug("BLOCK* NameSystem.removeStoredBlock: " + block
                + " has already been removed from node " + node);
        return;
    }

    //
    // It's possible that the block was removed because of a datanode
    // failure.  If the block is still valid, check if replication is
    // necessary.  In that case, put block on a possibly-will-
    // be-replicated list.
    //
    INode fileINode = blocksMap.getINode(block);
    if (fileINode != null) {
        decrementSafeBlockCount(block);
        updateNeededReplications(block, -1, 0);
    }

    //
    // We've removed a block from a node, so it's definitely no longer
    // in "excess" there.
    //
    Collection<Block> excessBlocks = excessReplicateMap.get(node.getStorageID());
    if (excessBlocks != null) {
        excessBlocks.remove(block);
        NameNode.stateChangeLog
                .debug("BLOCK* NameSystem.removeStoredBlock: " + block + " is removed from excessBlocks");
        if (excessBlocks.size() == 0) {
            excessReplicateMap.remove(node.getStorageID());
        }
    }

    // Remove the replica from corruptReplicas
    corruptReplicas.removeFromCorruptReplicasMap(block, node);
}
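
The excess-replica bookkeeping above follows a common multimap cleanup pattern: remove the element from the per-key collection, then drop the key itself once the collection is empty. On Java 8+ the same pattern can be expressed with Map.computeIfPresent; a generic sketch (not HDFS code, assuming Java 9+ for Set.of):

import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class MultimapCleanup {
    /** Removes value from the collection mapped at key, unmapping the key when it empties. */
    static <K, V> void removeMapping(Map<K, Collection<V>> map, K key, V value) {
        map.computeIfPresent(key, (k, values) -> {
            values.remove(value);                    // Collection.remove(Object)
            return values.isEmpty() ? null : values; // returning null removes the map entry
        });
    }

    public static void main(String[] args) {
        Map<String, Collection<Integer>> excess = new HashMap<>();
        excess.put("node1", new HashSet<>(Set.of(42)));
        removeMapping(excess, "node1", 42);
        System.out.println(excess.containsKey("node1")); // false
    }
}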

From source file: ontopoly.model.RoleField.java

/**
 * Change field value order so that the first value is ordered directly after the second value.
 **/
public void moveAfter(Topic instance, RoleField ofield, RoleField.ValueIF rfv1, RoleField.ValueIF rfv2) {
    Topic p1 = rfv1.getPlayer(ofield, instance);
    Topic p2 = rfv2.getPlayer(ofield, instance);

    TopicIF typeIf = OntopolyModelUtils.getTopicIF(instance.getTopicMap(), PSI.ON, "field-value-order");
    LocatorIF datatype = DataTypes.TYPE_STRING;
    TopicIF fieldDefinitionIf = getTopicIF();

    TopicIF topicIf = instance.getTopicIF();
    TopicIF p1topic = p1.getTopicIF();
    TopicIF p2topic = p2.getTopicIF();

    Collection<Topic> alltopics = getValues(instance, ofield);

    Map<Topic, OccurrenceIF> topics_occs = getValuesWithOrdering(instance);

    List<OccurrenceIF> occs = new ArrayList<OccurrenceIF>(topics_occs.values());
    Collections.sort(occs, new Comparator<OccurrenceIF>() {
        @Override
        public int compare(OccurrenceIF occ1, OccurrenceIF occ2) {
            return StringUtils.compare(occ1.getValue(), occ2.getValue());
        }
    });

    TopicMapBuilderIF builder = topicIf.getTopicMap().getBuilder();

    OccurrenceIF maxOcc = (occs.isEmpty() ? null : occs.get(occs.size() - 1));
    int fieldOrderMax = (maxOcc == null ? 0 : Ordering.stringToOrder(maxOcc.getValue()));

    // make sure this value has an order value
    OccurrenceIF p1occ = null;
    OccurrenceIF p2occ = topics_occs.get(p2);
    OccurrenceIF next_occ = null;
    int fieldOrderP2;
    int nextOrder = Ordering.MAX_ORDER;
    if (p2occ == null) {
        fieldOrderP2 = (fieldOrderMax == 0 ? 0 : fieldOrderMax + Ordering.ORDER_INCREMENTS);
        p2occ = builder.makeOccurrence(topicIf, typeIf, Ordering.orderToString(fieldOrderP2), datatype);
        p2occ.addTheme(fieldDefinitionIf);
        p2occ.addTheme(p2topic);
    } else {
        fieldOrderP2 = Ordering.stringToOrder(p2occ.getValue());
        // find occurrence after p2occ
        int indexP2occ = occs.indexOf(p2occ);
        if (indexP2occ < (occs.size() - 1))
            next_occ = occs.get(indexP2occ + 1);
        if (next_occ != null) {
            // if next then average this and next field orders
            int fieldOrderNext = Ordering.stringToOrder(next_occ.getValue());
            nextOrder = (fieldOrderP2 + fieldOrderNext) / 2;
            if (nextOrder != fieldOrderP2) {
                p1occ = topics_occs.get(p1);
                if (p1occ != null) {
                    p1occ.setValue(Ordering.orderToString(nextOrder));
                } else {
                    p1occ = builder.makeOccurrence(topicIf, typeIf, Ordering.orderToString(nextOrder),
                            datatype);
                    p1occ.addTheme(fieldDefinitionIf);
                    p1occ.addTheme(p1topic);
                }
            }
        }
    }
    if (nextOrder == Ordering.MAX_ORDER)
        nextOrder = fieldOrderP2;
    if (p1occ == null) {
        nextOrder += Ordering.ORDER_INCREMENTS;
        p1occ = topics_occs.get(p1);
        if (p1occ != null) {
            p1occ.setValue(Ordering.orderToString(nextOrder));
        } else {
            p1occ = builder.makeOccurrence(topicIf, typeIf, Ordering.orderToString(nextOrder), datatype);
            p1occ.addTheme(fieldDefinitionIf);
            p1occ.addTheme(p1topic);
        }

        // we need to reshuffle all existing orders after p2
        int indexP2occ = occs.indexOf(p2occ);
        if (indexP2occ > 0) {
            for (int i = indexP2occ + 1; i < occs.size(); i++) {
                OccurrenceIF occ = occs.get(i);
                nextOrder += Ordering.ORDER_INCREMENTS;
                occ.setValue(Ordering.orderToString(nextOrder));
            }
        }
    }
    // assign ordering to all topics with no existing ordering
    alltopics.remove(p1);
    alltopics.remove(p2);
    Iterator<Topic> aiter = alltopics.iterator();
    while (aiter.hasNext()) {
        Topic atopic = aiter.next();
        if (!topics_occs.containsKey(atopic)) {
            nextOrder += Ordering.ORDER_INCREMENTS;
            OccurrenceIF occ = builder.makeOccurrence(topicIf, typeIf, Ordering.orderToString(nextOrder),
                    datatype);
            occ.addTheme(fieldDefinitionIf);
            occ.addTheme(atopic.getTopicIF());
        }
    }
}