Example usage for java.util Set removeAll

List of usage examples for java.util Set removeAll

Introduction

On this page you can find example usage for java.util Set removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes from this set all of its elements that are contained in the specified collection (optional operation).
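
Before the full examples, here is a minimal, self-contained sketch (not taken from any of the projects below) showing the behaviour described above: removeAll performs an in-place set difference and returns true if the set changed.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RemoveAllBasics {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<>(Arrays.asList("red", "green", "blue"));

        // Remove every element that also appears in the argument collection.
        boolean changed = colors.removeAll(Arrays.asList("green", "blue", "yellow"));

        System.out.println(changed); // true, because at least one element was removed
        System.out.println(colors);  // [red]
    }
}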

Usage

From source file:com.amalto.core.storage.hibernate.HibernateStorage.java

private void cleanImpactedTables(List<ComplexTypeMetadata> sortedTypesToDrop) {
    Set<String> tablesToDrop = findTablesToDrop(sortedTypesToDrop);
    int totalCount = tablesToDrop.size();
    int totalRound = 0;
    Connection connection = null;
    try {
        connection = DriverManager.getConnection(dataSource.getConnectionURL(), dataSource.getUserName(),
                dataSource.getPassword());
        int successCount = 0;
        while (successCount < totalCount && totalRound++ < totalCount) {
            Set<String> dropedTables = new HashSet<String>();
            for (String table : tablesToDrop) {
                Statement statement = connection.createStatement();
                try {
                    statement.executeUpdate("DROP TABLE " + table); //$NON-NLS-1$
                    dropedTables.add(table);
                    successCount++;
                } catch (SQLException e) {
                    LOGGER.warn("Could not delete '" + table + "' in round " + totalRound + "."); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                } finally {
                    statement.close();
                }
            }
            tablesToDrop.removeAll(dropedTables);
        }
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Successfully deleted " + successCount + " tables (out of " + totalCount //$NON-NLS-1$//$NON-NLS-2$
                    + " tables) in " + totalRound + " rounds."); //$NON-NLS-1$ //$NON-NLS-2$
        }
    } catch (SQLException e) {
        throw new RuntimeException("Could not acquire connection to database.", e); //$NON-NLS-1$
    } finally {
        try {
            if (connection != null) {
                connection.close();
            }
        } catch (SQLException e) {
            LOGGER.error("Unexpected error on connection close.", e); //$NON-NLS-1$
        }
    }
}
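
The method above retries failing DROP statements over several rounds and shrinks the pending set with removeAll after each round, since removing from a set while iterating over it would throw a ConcurrentModificationException. A reduced, hypothetical sketch of that retry loop (the doDrop helper stands in for the JDBC call):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RetryRemaining {
    public static void main(String[] args) {
        Set<String> pending = new HashSet<>(Arrays.asList("TABLE_A", "TABLE_B", "TABLE_C"));
        int totalRounds = pending.size();
        int round = 0;
        while (!pending.isEmpty() && round++ < totalRounds) {
            Set<String> succeeded = new HashSet<>();
            for (String table : pending) {
                if (doDrop(table)) {     // hypothetical operation that may fail in this round
                    succeeded.add(table);
                }
            }
            // Collect successes first, then remove them in one removeAll call
            // instead of mutating 'pending' while iterating over it.
            pending.removeAll(succeeded);
        }
    }

    private static boolean doDrop(String table) {
        // Stand-in for "DROP TABLE ..." which may fail until dependent tables are gone.
        return Math.random() > 0.5;
    }
}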

From source file:com.joliciel.talismane.parser.TransitionBasedGlobalLearningParser.java

public List<ParseConfiguration> parseSentence(List<PosTagSequence> posTagSequences,
        FeatureWeightVector weightVector, RankingSolution correctSolution) {
    MONITOR.startTask("parseSentence");
    try {
        long startTime = (new Date()).getTime();
        int maxAnalysisTimeMilliseconds = maxAnalysisTimePerSentence * 1000;
        int minFreeMemoryBytes = minFreeMemory * KILOBYTE;

        TokenSequence tokenSequence = posTagSequences.get(0).getTokenSequence();

        TreeMap<Integer, TreeSet<ParseConfiguration>> heaps = new TreeMap<Integer, TreeSet<ParseConfiguration>>();

        TreeSet<ParseConfiguration> heap0 = new TreeSet<ParseConfiguration>();
        for (PosTagSequence posTagSequence : posTagSequences) {
            // add an initial ParseConfiguration for each postag sequence
            ParseConfiguration initialConfiguration = this.getParserServiceInternal()
                    .getInitialConfiguration(posTagSequence);
            initialConfiguration.setScoringStrategy(new SimpleRankingScoringStrategy());
            initialConfiguration.setRankingScore(0.0);
            heap0.add(initialConfiguration);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Adding initial posTagSequence: " + posTagSequence);
            }
        }
        heaps.put(0, heap0);
        TreeSet<ParseConfiguration> backupHeap = null;

        TreeSet<ParseConfiguration> finalHeap = null;
        while (heaps.size() > 0) {
            Entry<Integer, TreeSet<ParseConfiguration>> heapEntry = heaps.firstEntry();
            TreeSet<ParseConfiguration> currentHeap = heapEntry.getValue();
            int currentHeapIndex = heapEntry.getKey();
            if (LOG.isTraceEnabled()) {
                LOG.trace("##### Polling next heap: " + heapEntry.getKey() + ", size: "
                        + heapEntry.getValue().size());
            }

            boolean finished = false;
            // systematically set the final heap here, just in case we exit "naturally" with no more heaps
            finalHeap = heapEntry.getValue();
            backupHeap = new TreeSet<ParseConfiguration>();

            // we jump out when either (a) all tokens have been attached or (b) we go over the maximum allotted time
            ParseConfiguration topConf = currentHeap.first();
            if (topConf.isTerminal()) {
                LOG.trace("Exiting with terminal heap: " + heapEntry.getKey() + ", size: "
                        + heapEntry.getValue().size());
                finished = true;
            }

            // check if we've gone over the allotted time for this sentence
            long analysisTime = (new Date()).getTime() - startTime;
            if (maxAnalysisTimePerSentence > 0 && analysisTime > maxAnalysisTimeMilliseconds) {
                LOG.info("Parse tree analysis took too long for sentence: " + tokenSequence.getText());
                LOG.info("Breaking out after " + maxAnalysisTimePerSentence + " seconds.");
                finished = true;
            }

            // check if we've enough memory to process this sentence
            if (minFreeMemory > 0) {
                long freeMemory = Runtime.getRuntime().freeMemory();
                if (freeMemory < minFreeMemoryBytes) {
                    LOG.info("Not enough memory left to parse sentence: " + tokenSequence.getText());
                    LOG.info("Min free memory (bytes):" + minFreeMemoryBytes);
                    LOG.info("Current free memory (bytes): " + freeMemory);
                    finished = true;
                }
            }

            // check if any of the remaining top-N solutions on any heap can lead to the correct solution
            if (correctSolution != null) {
                boolean canReachCorrectSolution = false;
                for (TreeSet<ParseConfiguration> heap : heaps.values()) {
                    int j = 1;
                    for (ParseConfiguration solution : heap) {
                        if (j > beamWidth)
                            break;
                        if (solution.canReach(correctSolution)) {
                            canReachCorrectSolution = true;
                            break;
                        }
                        j++;
                    }
                    if (canReachCorrectSolution)
                        break;
                }
                if (!canReachCorrectSolution) {
                    LOG.debug("None of the solutions on the heap can reach the gold solution. Exiting.");
                    finished = true;
                }
            }

            if (finished) {
                // combine any remaining heaps
                for (TreeSet<ParseConfiguration> heap : heaps.values()) {
                    if (finalHeap != heap) {
                        finalHeap.addAll(heap);
                    }
                }
                break;
            }

            // remove heap from set of heaps
            heapEntry = heaps.pollFirstEntry();

            // limit the breadth to K
            int maxSolutions = currentHeap.size() > this.beamWidth ? this.beamWidth : currentHeap.size();

            int j = 0;
            while (currentHeap.size() > 0) {
                ParseConfiguration history = currentHeap.pollFirst();
                backupHeap.add(history);
                if (LOG.isTraceEnabled()) {
                    LOG.trace("### Next configuration on heap " + heapEntry.getKey() + ":");
                    LOG.trace(history.toString());
                    LOG.trace("Score: " + df.format(history.getScore()));
                    LOG.trace(history.getPosTagSequence());
                }

                Set<Transition> transitions = new HashSet<Transition>();

                // test the positive rules on the current configuration
                boolean ruleApplied = false;
                if (parserPositiveRules != null) {
                    MONITOR.startTask("check rules");
                    try {
                        for (ParserRule rule : parserPositiveRules) {
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Checking rule: " + rule.getCondition().getName());
                            }
                            RuntimeEnvironment env = this.featureService.getRuntimeEnvironment();
                            FeatureResult<Boolean> ruleResult = rule.getCondition().check(history, env);
                            if (ruleResult != null && ruleResult.getOutcome()) {
                                transitions.add(rule.getTransition());
                                ruleApplied = true;
                                if (LOG.isTraceEnabled()) {
                                    LOG.trace("Rule applies. Setting transition to: "
                                            + rule.getTransition().getCode());
                                }

                                if (!rule.getTransition().checkPreconditions(history)) {
                                    LOG.error("Cannot apply rule, preconditions not met.");
                                    ruleApplied = false;
                                }
                                break;
                            }
                        }
                    } finally {
                        MONITOR.endTask("check rules");
                    }
                }

                if (!ruleApplied) {
                    transitions = parsingConstrainer.getPossibleTransitions(history);

                    Set<Transition> eliminatedTransitions = new HashSet<Transition>();
                    for (Transition transition : transitions) {
                        if (!transition.checkPreconditions(history)) {
                            eliminatedTransitions.add(transition);
                        }
                    }
                    transitions.removeAll(eliminatedTransitions);

                    // apply the negative rules
                    eliminatedTransitions = new HashSet<Transition>();
                    if (parserNegativeRules != null) {
                        MONITOR.startTask("check negative rules");
                        try {
                            for (ParserRule rule : parserNegativeRules) {
                                if (LOG.isTraceEnabled()) {
                                    LOG.trace("Checking negative rule: " + rule.getCondition().getName());
                                }
                                RuntimeEnvironment env = this.featureService.getRuntimeEnvironment();
                                FeatureResult<Boolean> ruleResult = rule.getCondition().check(history, env);
                                if (ruleResult != null && ruleResult.getOutcome()) {
                                    eliminatedTransitions.add(rule.getTransition());
                                    if (LOG.isTraceEnabled()) {
                                        LOG.debug("Rule applies. Eliminating transition: "
                                                + rule.getTransition().getCode());
                                    }
                                }
                            }

                            if (eliminatedTransitions.size() == transitions.size()) {
                                LOG.debug("All transitions eliminated! Restoring original transitions.");
                            } else {
                                transitions.removeAll(eliminatedTransitions);
                            }
                        } finally {
                            MONITOR.endTask("check negative rules");
                        }
                    }
                } // has a positive rule been applied?

                if (transitions.size() == 0) {
                    // just in case we run out of both heaps and analyses, we build this backup heap
                    backupHeap.add(history);
                    if (LOG.isTraceEnabled())
                        LOG.trace(
                                "No transitions could be applied: not counting this solution as part of the beam");
                } else {
                    // up the counter, since we will count this solution towards the heap
                    j++;
                    // add solutions to the heap, one per valid transition
                    MONITOR.startTask("heap sort");
                    try {
                        Map<Transition, Double> deltaScorePerTransition = new HashMap<Transition, Double>();
                        double absoluteMax = 1;

                        for (Transition transition : transitions) {
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Applying transition: " + transition.getCode());
                            }
                            ParseConfiguration configuration = this.parserServiceInternal
                                    .getConfiguration(history);
                            transition.apply(configuration);
                            configuration.setRankingScore(history.getRankingScore());
                            configuration.getIncrementalFeatureResults()
                                    .addAll(history.getIncrementalFeatureResults());

                            // test the features on the new configuration
                            double scoreDelta = 0.0;
                            MONITOR.startTask("feature analyse");
                            List<FeatureResult<?>> featureResults = new ArrayList<FeatureResult<?>>();
                            try {
                                for (ParseConfigurationFeature<?> feature : this.parseFeatures) {
                                    MONITOR.startTask(feature.getName());
                                    try {
                                        RuntimeEnvironment env = this.featureService.getRuntimeEnvironment();
                                        FeatureResult<?> featureResult = feature.check(configuration, env);
                                        if (featureResult != null) {
                                            featureResults.add(featureResult);
                                            double weight = weightVector.getWeight(featureResult);
                                            scoreDelta += weight;
                                            if (LOG.isTraceEnabled()) {
                                                LOG.trace(featureResult.toString() + " = " + weight);
                                            }
                                        }
                                    } finally {
                                        MONITOR.endTask(feature.getName());
                                    }
                                }
                                configuration.getIncrementalFeatureResults().add(featureResults);
                                if (LOG.isTraceEnabled()) {
                                    LOG.trace("Score = " + configuration.getRankingScore() + " + " + scoreDelta
                                            + " = " + (configuration.getRankingScore() + scoreDelta));
                                }
                                configuration.setRankingScore(configuration.getRankingScore() + scoreDelta);
                                deltaScorePerTransition.put(transition, scoreDelta);
                                if (Math.abs(scoreDelta) > absoluteMax)
                                    absoluteMax = Math.abs(scoreDelta);

                            } finally {
                                MONITOR.endTask("feature analyse");
                            }

                            int nextHeapIndex = parseComparisonStrategy.getComparisonIndex(configuration)
                                    * 1000;
                            while (nextHeapIndex <= currentHeapIndex)
                                nextHeapIndex++;

                            TreeSet<ParseConfiguration> nextHeap = heaps.get(nextHeapIndex);
                            if (nextHeap == null) {
                                nextHeap = new TreeSet<ParseConfiguration>();
                                heaps.put(nextHeapIndex, nextHeap);
                                if (LOG.isTraceEnabled())
                                    LOG.trace("Created heap with index: " + nextHeapIndex);
                            }
                            nextHeap.add(configuration);
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Added configuration with score " + configuration.getScore()
                                        + " to heap: " + nextHeapIndex + ", total size: " + nextHeap.size());
                            }

                            configuration.clearMemory();
                        } // next transition

                        // Create a probability distribution of transitions
                        // normalise probabilities for each transition via normalised exponential
                        // e^(x/absmax)/sum(e^(x/absmax))
                        // where x/absmax is in [-1,1]
                        // e^(x/absmax) is in [1/e,e]

                        double total = 0.0;
                        for (Transition transition : deltaScorePerTransition.keySet()) {
                            double deltaScore = deltaScorePerTransition.get(transition);
                            deltaScore = Math.exp(deltaScore / absoluteMax);
                            deltaScorePerTransition.put(transition, deltaScore);
                            total += deltaScore;
                        }

                        for (Transition transition : deltaScorePerTransition.keySet()) {
                            double probability = deltaScorePerTransition.get(transition);
                            probability /= total;
                            Decision<Transition> decision = machineLearningService.createDecision(transition,
                                    probability);
                            transition.setDecision(decision);
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Transition: " + transition.getCode() + ", Prob: " + probability);
                            }
                        }

                    } finally {
                        MONITOR.endTask("heap sort");
                    }
                } // have we any transitions?

                // beam width test
                if (j == maxSolutions)
                    break;
            } // next history   
        } // next atomic index

        // return the best sequences on the heap
        List<ParseConfiguration> bestConfigurations = new ArrayList<ParseConfiguration>();
        int i = 0;

        if (finalHeap.isEmpty())
            finalHeap = backupHeap;

        while (!finalHeap.isEmpty()) {
            bestConfigurations.add(finalHeap.pollFirst());
            i++;
            if (i >= this.getBeamWidth())
                break;
        }
        if (LOG.isDebugEnabled()) {
            if (correctSolution != null) {
                LOG.debug("Gold transitions: " + correctSolution.getIncrementalOutcomes());
            }
            for (ParseConfiguration finalConfiguration : bestConfigurations) {
                LOG.debug(df.format(finalConfiguration.getScore()) + ": " + finalConfiguration.toString());
                LOG.debug("Pos tag sequence: " + finalConfiguration.getPosTagSequence());
                LOG.debug("Transitions: " + finalConfiguration.getTransitions());
                if (LOG.isTraceEnabled()) {
                    StringBuilder sb = new StringBuilder();
                    sb.append(" * PosTag sequence score ");
                    sb.append(df.format(finalConfiguration.getPosTagSequence().getScore()));
                    sb.append(" = ");
                    for (PosTaggedToken posTaggedToken : finalConfiguration.getPosTagSequence()) {
                        sb.append(" * ");
                        sb.append(df.format(posTaggedToken.getDecision().getProbability()));
                    }
                    sb.append(" root ");
                    sb.append(finalConfiguration.getPosTagSequence().size());
                    LOG.trace(sb.toString());

                    sb = new StringBuilder();
                    sb.append(" * Token sequence score = ");
                    sb.append(df.format(finalConfiguration.getPosTagSequence().getTokenSequence().getScore()));
                    LOG.trace(sb.toString());

                }
            }
        }
        return bestConfigurations;
    } finally {
        MONITOR.endTask("parseSentence");
    }
}
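
Both removeAll calls in the parser above follow the same shape: transitions to eliminate are collected into a separate set and removed in one go (mutating the set inside the for loop would fail), and the negative rules are only applied when they would not empty the candidate set. A reduced sketch of that guard, with a hypothetical violatesNegativeRule predicate:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class GuardedElimination {
    public static void main(String[] args) {
        Set<String> transitions = new HashSet<>(Arrays.asList("SHIFT", "REDUCE", "LEFT_ARC"));

        Set<String> eliminated = new HashSet<>();
        for (String t : transitions) {
            if (violatesNegativeRule(t)) {   // hypothetical predicate
                eliminated.add(t);
            }
        }

        if (eliminated.size() == transitions.size()) {
            // All candidates would be eliminated: keep the original set instead.
            System.out.println("All transitions eliminated, restoring original transitions.");
        } else {
            transitions.removeAll(eliminated);
        }
        System.out.println(transitions);
    }

    private static boolean violatesNegativeRule(String transition) {
        return "LEFT_ARC".equals(transition); // made-up rule for illustration
    }
}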

From source file:com.diversityarrays.kdxplore.curate.TrialDataEditor.java

private void checkForInvalidTraits() {
    SampleGroup db_smdata = curationData.getDatabaseSampleGroup();
    if (db_smdata == null) {
        return;
    }
    Bag<Integer> traitIdCountsFromDatabase = new HashBag<>();
    for (KdxSample sm : db_smdata.getSamples()) {
        traitIdCountsFromDatabase.add(sm.getTraitId());
    }

    Set<Integer> errorTraitIds = new HashSet<>(traitIdCountsFromDatabase.uniqueSet());

    Set<Integer> traitIds = CurationData.getTraitIds(curationData);
    errorTraitIds.removeAll(traitIds);

    if (!errorTraitIds.isEmpty()) {
        for (Integer id : errorTraitIds) {
            if (id == null) {
                continue;
            }
            Trait trait = this.traitProvider.apply(id);
            if (trait == null) {
                trait = new Trait();
                trait.setTraitId(id);
                trait.setTraitName("Unknown Trait#" + id);
            }
            errorTraitsById.put(id, trait);
        }
    }
}

From source file:com.ephesoft.dcma.workflow.service.webservices.EphesoftWebServiceAPI.java

@RequestMapping(value = "/restartAllBatchInstance", method = RequestMethod.GET)
@ResponseBody
public String restartAllBatchInstance(final HttpServletResponse resp, final HttpServletRequest req) {
    String isSuccess = WebServiceUtil.EMPTY_STRING;
    String userName = req.getUserPrincipal().getName(); // ToDo fetch from authentication header.
    Set<String> userRoles = userConnectivityService.getUserGroups(userName);
    List<BatchInstanceStatus> batchStatusList = new ArrayList<BatchInstanceStatus>();
    batchStatusList.add(BatchInstanceStatus.READY_FOR_REVIEW);
    batchStatusList.add(BatchInstanceStatus.READY_FOR_VALIDATION);
    List<BatchInstance> batchInstanceList = biService.getBatchInstanceByStatusList(batchStatusList);
    final boolean isZipSwitchOn = bsService.isZipSwitchOn();
    if (batchInstanceList.size() > 0) {
        for (BatchInstance batchInstance : batchInstanceList) {
            Set<String> batchInstanceRoles = biService.getRolesForBatchInstance(batchInstance);
            String batchInstanceIdentifier = batchInstance.getIdentifier();
            logger.info("Restarting batch instance : " + batchInstanceIdentifier);
            if (batchInstanceRoles.removeAll(userRoles)) {
                String activityName = workflowService.getActiveModule(batchInstance);
                if (activityName != null) {
                    int indexOf = activityName.indexOf('.');
                    indexOf = indexOf == -1 ? activityName.length() : indexOf;
                    String moduleName = activityName.substring(0, indexOf);
                    try {
                        String batchClassIdentifier = biService
                                .getBatchClassIdentifier(batchInstanceIdentifier);
                        isSuccess = processRestartingBatchInternal(batchInstanceIdentifier, moduleName,
                                isSuccess.toString(), batchInstance, batchClassIdentifier, isZipSwitchOn,
                                activityName);
                    } catch (Exception e) {
                        if (isSuccess.isEmpty()) {
                            isSuccess += "Error in restarting following batch instance identifiers: ";
                        } else {
                            isSuccess += ", ";
                        }
                        isSuccess += batchInstanceIdentifier;
                        logger.error("Error while restarting batch instance: " + batchInstanceIdentifier);
                    }
                } else {
                    if (isSuccess.isEmpty()) {
                        isSuccess += "Error in restarting batch instance identifiers are : ";
                    } else {
                        isSuccess += ", ";
                    }
                    isSuccess += batchInstanceIdentifier;
                }
            } else {
                if (isSuccess.isEmpty()) {
                    isSuccess += "Error in restarting following batch instance identifiers:";
                } else {
                    isSuccess += ", ";
                }
                isSuccess += "User is not authorized for id:" + batchInstanceIdentifier;
                logger.error("Error while restarting batch instance: " + batchInstanceIdentifier);
            }
        }

    } else {
        isSuccess = "No results found.";
    }
    return isSuccess;
}
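
Note how restartAllBatchInstance uses the boolean return value of batchInstanceRoles.removeAll(userRoles) purely as an intersection test: it is true whenever the two sets share at least one role. Keep in mind that this mutates batchInstanceRoles; when the set must stay intact, !Collections.disjoint(a, b) gives the same answer without side effects. A small sketch with made-up role names:

import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class IntersectionCheck {
    public static void main(String[] args) {
        Set<String> batchRoles = new HashSet<>(Arrays.asList("admin", "operator"));
        Set<String> userRoles = new HashSet<>(Arrays.asList("operator", "viewer"));

        // Non-mutating intersection test.
        boolean authorized = !Collections.disjoint(batchRoles, userRoles);
        System.out.println(authorized); // true

        // Mutating variant, as used above: true if at least one shared element was removed.
        boolean changed = batchRoles.removeAll(userRoles);
        System.out.println(changed);    // true
        System.out.println(batchRoles); // [admin]
    }
}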

From source file:loci.formats.in.LIFReader.java

/** Parses a string of XML and puts the values in a Hashtable. */
private void initMetadata(String xml) throws FormatException, IOException {
    try {
        ServiceFactory factory = new ServiceFactory();
        OMEXMLService service = factory.getInstance(OMEXMLService.class);
        service.createOMEXMLMetadata();
    } catch (DependencyException exc) {
        throw new FormatException("Could not create OME-XML store.", exc);
    } catch (ServiceException exc) {
        throw new FormatException("Could not create OME-XML store.", exc);
    }
    MetadataStore store = makeFilterMetadata();

    // the XML blocks stored in a LIF file are invalid,
    // because they don't have a root node

    xml = "<?xml version=\"1.0\" encoding=\"" + ENCODING + "\"?><LEICA>" + xml + "</LEICA>";

    xml = XMLTools.sanitizeXML(xml);

    translateMetadata(getMetadataRoot(xml));

    for (int i = 0; i < imageNames.length; i++) {
        setSeries(i);
        addSeriesMeta("Image name", imageNames[i]);
    }
    setSeries(0);

    // set up mapping to rearrange channels
    // for instance, the green channel may be #0, and the red channel may be #1
    realChannel = new int[tileCount.length][];
    int nextLut = 0;

    for (int i = 0; i < core.size(); i++) {
        int index = getTileIndex(i);
        if (realChannel[index] != null) {
            continue;
        }
        CoreMetadata ms = core.get(i);
        realChannel[index] = new int[ms.sizeC];

        for (int q = 0; q < ms.sizeC; q++) {
            String lut = "";
            if (nextLut < lutNames.size()) {
                lut = lutNames.get(nextLut++).toLowerCase();
            }
            if (!CHANNEL_PRIORITIES.containsKey(lut))
                lut = "";
            realChannel[index][q] = CHANNEL_PRIORITIES.get(lut).intValue();
        }

        int[] sorted = new int[ms.sizeC];
        Arrays.fill(sorted, -1);

        for (int q = 0; q < sorted.length; q++) {
            int min = Integer.MAX_VALUE;
            int minIndex = -1;
            for (int n = 0; n < ms.sizeC; n++) {
                if (realChannel[index][n] < min && !DataTools.containsValue(sorted, n)) {
                    min = realChannel[index][n];
                    minIndex = n;
                }
            }

            sorted[q] = minIndex;
        }
    }

    MetadataTools.populatePixels(store, this, true, false);

    int roiCount = 0;
    for (int i = 0; i < getSeriesCount(); i++) {
        setSeries(i);

        String instrumentID = MetadataTools.createLSID("Instrument", i);
        store.setInstrumentID(instrumentID, i);

        int index = getTileIndex(i);

        store.setMicroscopeModel(microscopeModels[index], i);
        store.setMicroscopeType(getMicroscopeType("Other"), i);

        String objectiveID = MetadataTools.createLSID("Objective", i, 0);
        store.setObjectiveID(objectiveID, i, 0);
        store.setObjectiveLensNA(lensNA[index], i, 0);
        store.setObjectiveSerialNumber(serialNumber[index], i, 0);
        if (magnification[index] != null) {
            store.setObjectiveNominalMagnification(magnification[index], i, 0);
        }
        store.setObjectiveImmersion(getImmersion(immersions[index]), i, 0);
        store.setObjectiveCorrection(getCorrection(corrections[index]), i, 0);
        store.setObjectiveModel(objectiveModels[index], i, 0);

        if (cutIns[index] != null && filterModels[index] != null) {
            int channel = 0;
            if (cutIns[index].size() >= filterModels[index].size() * 2) {
                int diff = cutIns[index].size() - filterModels[index].size();
                for (int q = 0; q < diff; q++) {
                    cutIns[index].remove(filterModels[index].size());
                }
            }
            for (int filter = 0; filter < cutIns[index].size(); filter++) {
                String filterID = MetadataTools.createLSID("Filter", i, filter);
                store.setFilterID(filterID, i, filter);
                if (filterModels[index] != null && filter < filterModels[index].size()) {
                    store.setFilterModel((String) filterModels[index].get(filter), i, filter);
                }
                store.setTransmittanceRangeCutIn((Length) cutIns[index].get(filter), i, filter);
                store.setTransmittanceRangeCutOut((Length) cutOuts[index].get(filter), i, filter);
            }
        }

        final List<Double> lasers = laserWavelength[index];
        final List<Double> laserIntensities = laserIntensity[index];

        final List<Boolean> active = laserActive[index];
        final List<Boolean> frap = laserFrap[index];
        int nextChannel = 0;

        if (lasers != null) {
            int laserIndex = 0;
            while (laserIndex < lasers.size()) {
                if ((Double) lasers.get(laserIndex) == 0) {
                    lasers.remove(laserIndex);
                } else {
                    laserIndex++;
                }
            }

            for (int laser = 0; laser < lasers.size(); laser++) {
                String id = MetadataTools.createLSID("LightSource", i, laser);
                store.setLaserID(id, i, laser);
                store.setLaserType(LaserType.OTHER, i, laser);
                store.setLaserLaserMedium(LaserMedium.OTHER, i, laser);
                Double wavelength = (Double) lasers.get(laser);
                Length wave = FormatTools.getWavelength(wavelength);
                if (wave != null) {
                    store.setLaserWavelength(wave, i, laser);
                }
            }

            Set<Integer> ignoredChannels = new HashSet<Integer>();
            final List<Integer> validIntensities = new ArrayList<Integer>();
            int size = lasers.size();
            int channel = 0;
            Set<Integer> channels = new HashSet<Integer>();

            for (int laser = 0; laser < laserIntensities.size(); laser++) {
                double intensity = (Double) laserIntensities.get(laser);
                channel = laser / size;
                if (intensity < 100) {
                    validIntensities.add(laser);
                    channels.add(channel);
                }
                ignoredChannels.add(channel);
            }
            //remove channels w/o valid intensities
            ignoredChannels.removeAll(channels);
            //remove entries if channel has 2 wavelengths
            //e.g. 30% 458 70% 633
            int s = validIntensities.size();

            int jj;
            Set<Integer> toRemove = new HashSet<Integer>();

            int as = active.size();
            for (int j = 0; j < s; j++) {
                if (j < as && !(Boolean) active.get(j)) {
                    toRemove.add(validIntensities.get(j));
                }
                jj = j + 1;
                if (jj < s) {
                    int v = validIntensities.get(j) / size;
                    int vv = validIntensities.get(jj) / size;
                    if (vv == v) {//do not consider that channel.
                        toRemove.add(validIntensities.get(j));
                        toRemove.add(validIntensities.get(jj));
                        ignoredChannels.add(j);
                    }
                }
            }
            if (toRemove.size() > 0) {
                validIntensities.removeAll(toRemove);
            }

            boolean noNames = true;
            if (channelNames[index] != null) {
                for (String name : channelNames[index]) {
                    if (name != null && !name.equals("")) {
                        noNames = false;
                        break;
                    }
                }
            }
            if (!noNames && frap != null) { //only use name for frap.
                for (int k = 0; k < frap.size(); k++) {
                    if (!frap.get(k)) {
                        noNames = true;
                        break;
                    }
                }
            }

            int nextFilter = 0;
            //int nextFilter = cutIns[i].size() - getEffectiveSizeC();
            for (int k = 0; k < validIntensities.size(); k++, nextChannel++) {
                int laserArrayIndex = validIntensities.get(k);
                double intensity = (Double) laserIntensities.get(laserArrayIndex);
                int laser = laserArrayIndex % lasers.size();
                Double wavelength = (Double) lasers.get(laser);
                if (wavelength != 0) {
                    while (ignoredChannels.contains(nextChannel)) {
                        nextChannel++;
                    }
                    while (channelNames != null && nextChannel < getEffectiveSizeC()
                            && channelNames[index] != null && ((channelNames[index][nextChannel] == null
                                    || channelNames[index][nextChannel].equals("")) && !noNames)) {
                        nextChannel++;
                    }
                    if (nextChannel < getEffectiveSizeC()) {
                        String id = MetadataTools.createLSID("LightSource", i, laser);
                        store.setChannelLightSourceSettingsID(id, i, nextChannel);
                        store.setChannelLightSourceSettingsAttenuation(
                                new PercentFraction((float) intensity / 100f), i, nextChannel);

                        Length ex = FormatTools.getExcitationWavelength(wavelength);
                        if (ex != null) {
                            store.setChannelExcitationWavelength(ex, i, nextChannel);
                        }

                        if (wavelength > 0) {
                            if (cutIns[index] == null || nextFilter >= cutIns[index].size()) {
                                continue;
                            }
                            Double cutIn = ((Length) cutIns[index].get(nextFilter)).value(UNITS.NM)
                                    .doubleValue();
                            while (cutIn - wavelength > 20) {
                                nextFilter++;
                                if (nextFilter < cutIns[index].size()) {
                                    cutIn = ((Length) cutIns[index].get(nextFilter)).value(UNITS.NM)
                                            .doubleValue();
                                } else {
                                    break;
                                }
                            }
                            if (nextFilter < cutIns[index].size()) {
                                String fid = MetadataTools.createLSID("Filter", i, nextFilter);
                                //store.setLightPathEmissionFilterRef(fid, i, nextChannel, 0);
                                nextFilter++;
                            }
                        }
                    }
                }
            }
        }

        store.setImageInstrumentRef(instrumentID, i);
        store.setObjectiveSettingsID(objectiveID, i);
        store.setObjectiveSettingsRefractiveIndex(refractiveIndex[index], i);

        store.setImageDescription(descriptions[index], i);
        if (acquiredDate[index] > 0) {
            store.setImageAcquisitionDate(new Timestamp(DateTools.convertDate(
                    (long) (acquiredDate[index] * 1000), DateTools.COBOL, DateTools.ISO8601_FORMAT, true)), i);
        }
        store.setImageName(imageNames[index].trim(), i);

        Length sizeX = FormatTools.getPhysicalSizeX(physicalSizeXs.get(index));
        Length sizeY = FormatTools.getPhysicalSizeY(physicalSizeYs.get(index));
        Length sizeZ = FormatTools.getPhysicalSizeZ(zSteps[index]);

        if (sizeX != null) {
            store.setPixelsPhysicalSizeX(sizeX, i);
        }
        if (sizeY != null) {
            store.setPixelsPhysicalSizeY(sizeY, i);
        }
        if (sizeZ != null) {
            store.setPixelsPhysicalSizeZ(sizeZ, i);
        }
        if (tSteps[index] != null) {
            store.setPixelsTimeIncrement(new Time(tSteps[index], UNITS.S), i);
        }

        final List<String> detectors = detectorModels[index];
        if (detectors != null) {
            nextChannel = 0;
            int start = detectors.size() - getEffectiveSizeC();
            if (start < 0) {
                start = 0;
            }
            for (int detector = start; detector < detectors.size(); detector++) {
                int dIndex = detector - start;
                String detectorID = MetadataTools.createLSID("Detector", i, dIndex);
                store.setDetectorID(detectorID, i, dIndex);
                store.setDetectorModel((String) detectors.get(detector), i, dIndex);

                store.setDetectorZoom(zooms[index], i, dIndex);
                store.setDetectorType(DetectorType.PMT, i, dIndex);

                if (activeDetector[index] != null) {
                    int detectorIndex = activeDetector[index].size() - getEffectiveSizeC() + dIndex;
                    if (detectorIndex >= 0 && detectorIndex < activeDetector[index].size()
                            && (Boolean) activeDetector[index].get(detectorIndex)
                            && detectorOffsets[index] != null && nextChannel < detectorOffsets[index].length) {
                        store.setDetectorOffset(detectorOffsets[index][nextChannel++], i, dIndex);
                    }
                }
            }
        }

        final List<Boolean> activeDetectors = activeDetector[index];
        int firstDetector = activeDetectors == null ? 0 : activeDetectors.size() - getEffectiveSizeC();
        int nextDetector = firstDetector;

        int nextFilter = 0;
        int nextFilterDetector = 0;

        if (activeDetectors != null && activeDetectors.size() > cutIns[index].size()
                && (Boolean) activeDetectors.get(activeDetectors.size() - 1)
                && (Boolean) activeDetectors.get(activeDetectors.size() - 2)) {
            nextFilterDetector = activeDetectors.size() - cutIns[index].size();

            if (cutIns[index].size() > filterModels[index].size()) {
                nextFilterDetector += filterModels[index].size();
                nextFilter += filterModels[index].size();
            }
        }

        for (int c = 0; c < getEffectiveSizeC(); c++) {
            if (activeDetectors != null) {
                while (nextDetector >= 0 && nextDetector < activeDetectors.size()
                        && !(Boolean) activeDetectors.get(nextDetector)) {
                    nextDetector++;
                }
                if (nextDetector < activeDetectors.size() && detectors != null
                        && nextDetector - firstDetector < detectors.size()) {
                    String detectorID = MetadataTools.createLSID("Detector", i, nextDetector - firstDetector);
                    store.setDetectorSettingsID(detectorID, i, c);
                    nextDetector++;

                    if (detectorOffsets[index] != null && c < detectorOffsets[index].length) {
                        store.setDetectorSettingsOffset(detectorOffsets[index][c], i, c);
                    }

                    if (gains[index] != null) {
                        store.setDetectorSettingsGain(gains[index][c], i, c);
                    }
                }
            }

            if (channelNames[index] != null) {
                store.setChannelName(channelNames[index][c], i, c);
            }
            if (pinholes[index] != null) {
                store.setChannelPinholeSize(new Length(pinholes[index], UNITS.MICROM), i, c);
            }
            if (exWaves[index] != null) {
                if (exWaves[index][c] != null && exWaves[index][c] > 1) {
                    Length ex = FormatTools.getExcitationWavelength(exWaves[index][c]);
                    if (ex != null) {
                        store.setChannelExcitationWavelength(ex, i, c);
                    }
                }
            }

            // channel coloring is implicit if the image is stored as RGB
            Color channelColor = getChannelColor(realChannel[index][c]);
            if (!isRGB()) {
                store.setChannelColor(channelColor, i, c);
            }

            if (channelColor.getValue() != -1 && nextFilter >= 0) {
                if (nextDetector - firstDetector != getSizeC() && cutIns[index] != null
                        && nextDetector >= cutIns[index].size()) {
                    while (nextFilterDetector < firstDetector) {
                        String filterID = MetadataTools.createLSID("Filter", i, nextFilter);
                        store.setFilterID(filterID, i, nextFilter);

                        nextFilterDetector++;
                        nextFilter++;
                    }
                }
                while (activeDetectors != null && nextFilterDetector < activeDetectors.size()
                        && !(Boolean) activeDetectors.get(nextFilterDetector)) {
                    String filterID = MetadataTools.createLSID("Filter", i, nextFilter);
                    store.setFilterID(filterID, i, nextFilter);
                    nextFilterDetector++;
                    nextFilter++;
                }
                String filterID = MetadataTools.createLSID("Filter", i, nextFilter);
                store.setFilterID(filterID, i, nextFilter);
                store.setLightPathEmissionFilterRef(filterID, i, c, 0);
                nextFilterDetector++;
                nextFilter++;
            }
        }

        for (int image = 0; image < getImageCount(); image++) {
            Length xPos = posX[index];
            Length yPos = posY[index];
            if (i < fieldPosX.size() && fieldPosX.get(i) != null) {
                xPos = fieldPosX.get(i);
            }
            if (i < fieldPosY.size() && fieldPosY.get(i) != null) {
                yPos = fieldPosY.get(i);
            }
            if (xPos != null) {
                store.setPlanePositionX(xPos, i, image);
            }
            if (yPos != null) {
                store.setPlanePositionY(yPos, i, image);
            }
            store.setPlanePositionZ(posZ[index], i, image);
            if (timestamps[index] != null) {
                double timestamp = timestamps[index][image];
                if (timestamps[index][0] == acquiredDate[index]) {
                    timestamp -= acquiredDate[index];
                } else if (timestamp == acquiredDate[index] && image > 0) {
                    timestamp = timestamps[index][0];
                }
                store.setPlaneDeltaT(new Time(timestamp, UNITS.S), i, image);
            }

            if (expTimes[index] != null) {
                int c = getZCTCoords(image)[1];
                if (expTimes[index][c] != null) {
                    store.setPlaneExposureTime(new Time(expTimes[index][c], UNITS.S), i, image);
                }
            }
        }

        if (imageROIs[index] != null) {
            for (int roi = 0; roi < imageROIs[index].length; roi++) {
                if (imageROIs[index][roi] != null) {
                    imageROIs[index][roi].storeROI(store, i, roiCount++, roi);
                }
            }
        }
    }
}

From source file:com.redhat.rhn.frontend.xmlrpc.channel.software.ChannelSoftwareHandler.java

private Set<Errata> mergeErrataToChannel(User user, Set<Errata> errataToMerge, Channel toChannel,
        Channel fromChannel) {

    // find errata that we do not need to merge
    List<Errata> same = ErrataManager.listSamePublishedInChannels(user, fromChannel, toChannel);
    List<Errata> brothers = ErrataManager.listPublishedBrothersInChannels(user, fromChannel, toChannel);
    List<Errata> clones = ErrataManager.listPublishedClonesInChannels(user, fromChannel, toChannel);
    // and remove them
    errataToMerge.removeAll(same);
    errataToMerge.removeAll(brothers);
    errataToMerge.removeAll(clones);

    ErrataManager.publishErrataToChannelAsync(toChannel, getErrataIds(errataToMerge), user);

    // no need to regenerate errata cache, because we didn't touch any packages

    return errataToMerge;
}
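
The three chained removeAll calls above are equivalent to subtracting a single combined collection; which form to prefer is mostly a readability choice. A minimal sketch of the combined variant, with invented errata names:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class CombinedRemoveAll {
    public static void main(String[] args) {
        Set<String> errataToMerge = new HashSet<>(Arrays.asList("e1", "e2", "e3", "e4"));
        List<String> same = Arrays.asList("e1");
        List<String> brothers = Arrays.asList("e2");
        List<String> clones = Arrays.asList("e4");

        // Build one exclusion set and subtract it in a single call.
        Set<String> exclude = new HashSet<>(same);
        exclude.addAll(brothers);
        exclude.addAll(clones);
        errataToMerge.removeAll(exclude);

        System.out.println(errataToMerge); // [e3]
    }
}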

From source file:de.erdesignerng.visual.jgraph.JGraphEditor.java

private List<Set<Table>> buildHierarchy(Model aModel) {
    // Try to build a hierarchy
    List<Set<Table>> theLayers = new ArrayList<>();
    Set<Table> theCurrentLayer = new HashSet<>();
    Set<Table> theAlreadyKnown = new HashSet<>();
    for (Table theTable : aModel.getTables()) {
        boolean isTopLevel = true;
        List<Relation> theRelations = aModel.getRelations().getExportedKeysFor(theTable);
        if (theRelations.size() == 0) {
            isTopLevel = true;
        } else {
            for (Relation theRelation : theRelations) {
                if (theRelation.getImportingTable() != theTable) {
                    isTopLevel = false;
                }
            }
        }
        if (isTopLevel) {
            theCurrentLayer.add(theTable);
            theAlreadyKnown.add(theTable);
        }
    }

    // Top Level components
    theLayers.add(theCurrentLayer);

    Set<Table> theTablesToSearch = new HashSet<>();
    theTablesToSearch.addAll(theCurrentLayer);
    while (theTablesToSearch.size() > 0) {
        theCurrentLayer = new HashSet<>();
        for (Table theTable : theTablesToSearch) {
            for (Relation theRelation : aModel.getRelations().getForeignKeysFor(theTable)) {
                if (theRelation.getExportingTable() != theTable
                        && !theAlreadyKnown.contains(theRelation.getExportingTable())) {
                    theCurrentLayer.add(theRelation.getExportingTable());
                    theAlreadyKnown.add(theRelation.getExportingTable());
                }
            }
        }
        if (theCurrentLayer.size() > 0) {

            Set<Table> theTablesToRemove = new HashSet<>();

            for (Table theTable : theCurrentLayer) {
                boolean isUsedInSameLayer = false;
                for (Relation theRelation : aModel.getRelations().getExportedKeysFor(theTable)) {
                    if (theRelation.getImportingTable() != theTable
                            && theCurrentLayer.contains(theRelation.getImportingTable())) {
                        isUsedInSameLayer = true;
                    }
                }
                if (isUsedInSameLayer) {
                    theTablesToRemove.add(theTable);
                }
            }

            theCurrentLayer.removeAll(theTablesToRemove);
            theAlreadyKnown.removeAll(theTablesToRemove);

            theLayers.add(theCurrentLayer);
            theTablesToSearch = theCurrentLayer;
        } else {
            theTablesToSearch.clear();
        }
    }
    return theLayers;
}

From source file:org.opencastproject.workflow.handler.CleanupWorkflowOperationHandler.java

/**
 * {@inheritDoc}
 * 
 * @see org.opencastproject.workflow.api.AbstractWorkflowOperationHandler#start(org.opencastproject.workflow.api.WorkflowInstance,
 *      JobContext)
 */
@Override
public WorkflowOperationResult start(WorkflowInstance workflowInstance, JobContext context)
        throws WorkflowOperationException {
    MediaPackage mediaPackage = workflowInstance.getMediaPackage();
    WorkflowOperationInstance currentOperation = workflowInstance.getCurrentOperation();

    String flavors = currentOperation.getConfiguration(PRESERVE_FLAVOR_PROPERTY);
    final List<MediaPackageElementFlavor> flavorsToPreserve = new ArrayList<MediaPackageElementFlavor>();

    boolean deleteExternal = BooleanUtils.toBoolean(currentOperation.getConfiguration(DELETE_EXTERNAL));

    // If the configuration does not specify flavors, remove them all
    for (String flavor : asList(flavors)) {
        flavorsToPreserve.add(MediaPackageElementFlavor.parseFlavor(flavor));
    }

    String baseUrl = workspace.getBaseUri().toString();

    // Find all external working file repository base Urls
    List<String> externalWfrBaseUrls = new ArrayList<String>();
    if (deleteExternal) {
        try {
            for (ServiceRegistration reg : serviceRegistry
                    .getServiceRegistrationsByType(WorkingFileRepository.SERVICE_TYPE)) {
                if (baseUrl.startsWith(reg.getHost()))
                    continue;
                externalWfrBaseUrls.add(UrlSupport.concat(reg.getHost(), reg.getPath()));
            }
        } catch (ServiceRegistryException e) {
            logger.error("Unable to load WFR services from service registry: {}", e.getMessage());
            throw new WorkflowOperationException(e);
        }
    }

    // Some URIs are shared by multiple elements. If one of these elements should be deleted but another should not, we
    // must keep the file.
    Set<URI> urisToDelete = new HashSet<URI>();
    Set<URI> urisToKeep = new HashSet<URI>();
    for (MediaPackageElement element : mediaPackage.getElements()) {
        if (element.getURI() == null)
            continue;

        String elementUri = element.getURI().toString();
        if (!elementUri.startsWith(baseUrl)) {
            if (deleteExternal) {

                String wfrBaseUrl = null;
                for (String url : externalWfrBaseUrls) {
                    if (element.getURI().toString().startsWith(url)) {
                        wfrBaseUrl = url;
                        break;
                    }
                }
                if (wfrBaseUrl == null)
                    continue;

                HttpDelete delete;
                if (elementUri.startsWith(
                        UrlSupport.concat(wfrBaseUrl, WorkingFileRepository.MEDIAPACKAGE_PATH_PREFIX))) {
                    String wfrDeleteUrl = elementUri.substring(0, elementUri.lastIndexOf("/"));
                    delete = new HttpDelete(wfrDeleteUrl);
                } else if (elementUri.startsWith(
                        UrlSupport.concat(wfrBaseUrl, WorkingFileRepository.COLLECTION_PATH_PREFIX))) {
                    delete = new HttpDelete(elementUri);
                } else {
                    logger.info("Unable to handle URI {}", elementUri);
                    continue;
                }

                try {
                    HttpResponse response = client.execute(delete);
                    int statusCode = response.getStatusLine().getStatusCode();
                    if (statusCode == HttpStatus.SC_NO_CONTENT || statusCode == HttpStatus.SC_OK) {
                        logger.info("Sucessfully deleted external URI {}", delete.getURI());
                    } else if (statusCode == HttpStatus.SC_NOT_FOUND) {
                        logger.info("External URI {} has already been deleted", delete.getURI());
                    } else {
                        logger.info("Unable to delete external URI {}, status code '{}' returned",
                                delete.getURI(), statusCode);
                    }
                } catch (TrustedHttpClientException e) {
                    logger.warn("Unable to execute DELETE request on external URI {}", delete.getURI());
                    throw new WorkflowOperationException(e);
                }
            }
            continue;
        }

        // remove the element if it doesn't match the flavors to preserve
        boolean remove = true;
        for (MediaPackageElementFlavor flavor : flavorsToPreserve) {
            if (flavor.matches(element.getFlavor())) {
                remove = false;
                break;
            }
        }
        if (remove) {
            urisToDelete.add(element.getURI());
            mediaPackage.remove(element);
        } else {
            urisToKeep.add(element.getURI());
        }
    }

    // Remove all of the files to keep from the ones to delete
    urisToDelete.removeAll(urisToKeep);

    // Now remove the files to delete
    for (URI uri : urisToDelete) {
        try {
            workspace.delete(uri);
        } catch (Exception e) {
            logger.warn("Unable to delete {}", uri);
        }
    }
    return createResult(mediaPackage, Action.CONTINUE);
}
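
The cleanup handler above collects candidate URIs to delete and URIs to keep in parallel, then subtracts the keep set, so a URI shared by a removable and a preserved element survives. A minimal sketch of that precedence rule, with URIs replaced by plain strings:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class KeepWins {
    public static void main(String[] args) {
        Set<String> toDelete = new HashSet<>(Arrays.asList("file-a", "file-shared"));
        Set<String> toKeep = new HashSet<>(Arrays.asList("file-shared", "file-b"));

        // Anything scheduled for deletion that is also needed elsewhere is spared.
        toDelete.removeAll(toKeep);
        System.out.println(toDelete); // [file-a]
    }
}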

From source file:de.thm.arsnova.dao.CouchDBDao.java

private List<SessionInfo> getVisitedSessionInfoData(List<Session> sessions, ExtendedView answeredQuestionsView,
        ExtendedView questionIdsView) {
    final Map<String, Set<String>> answeredQuestionsMap = new HashMap<String, Set<String>>();
    final Map<String, Set<String>> questionIdMap = new HashMap<String, Set<String>>();
    final ViewResults answeredQuestionsViewResults = getDatabase().view(answeredQuestionsView);
    final ViewResults questionIdsViewResults = getDatabase().view(questionIdsView);

    // Maps a session ID to a set of question IDs of answered questions of that session
    for (final Document d : answeredQuestionsViewResults.getResults()) {
        final String sessionId = d.getJSONArray("key").getString(1);
        final String questionId = d.getString("value");
        Set<String> questionIdsInSession = answeredQuestionsMap.get(sessionId);
        if (questionIdsInSession == null) {
            questionIdsInSession = new HashSet<String>();
        }
        questionIdsInSession.add(questionId);
        answeredQuestionsMap.put(sessionId, questionIdsInSession);
    }

    // Maps a session ID to a set of question IDs of that session
    for (final Document d : questionIdsViewResults.getResults()) {
        final String sessionId = d.getString("key");
        final String questionId = d.getId();
        Set<String> questionIdsInSession = questionIdMap.get(sessionId);
        if (questionIdsInSession == null) {
            questionIdsInSession = new HashSet<String>();
        }
        questionIdsInSession.add(questionId);
        questionIdMap.put(sessionId, questionIdsInSession);
    }

    // For each session, count the question IDs that are not yet answered
    Map<String, Integer> unansweredQuestionsCountMap = new HashMap<String, Integer>();
    for (final Session s : sessions) {
        if (!questionIdMap.containsKey(s.get_id())) {
            continue;
        }
        // Note: create a copy of the first set so that we don't modify the contents in the original set
        Set<String> questionIdsInSession = new HashSet<String>(questionIdMap.get(s.get_id()));
        Set<String> answeredQuestionIdsInSession = answeredQuestionsMap.get(s.get_id());
        if (answeredQuestionIdsInSession == null) {
            answeredQuestionIdsInSession = new HashSet<String>();
        }
        questionIdsInSession.removeAll(answeredQuestionIdsInSession);
        unansweredQuestionsCountMap.put(s.get_id(), questionIdsInSession.size());
    }

    List<SessionInfo> sessionInfos = new ArrayList<SessionInfo>();
    for (Session session : sessions) {
        int numUnanswered = 0;

        if (unansweredQuestionsCountMap.containsKey(session.get_id())) {
            numUnanswered = unansweredQuestionsCountMap.get(session.get_id());
        }
        SessionInfo info = new SessionInfo(session);
        info.setNumUnanswered(numUnanswered);
        sessionInfos.add(info);
    }
    return sessionInfos;
}
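
Note the defensive copy before the subtraction above: removeAll mutates the set it is called on, so the question IDs are copied into a new HashSet before the answered IDs are removed, leaving the sets stored in questionIdMap untouched. A small standalone sketch of that copy-then-subtract counting step, with made-up question IDs:

import java.util.HashSet;
import java.util.Set;

public class UnansweredCountSketch {
    public static void main(String[] args) {
        // All question IDs of a session (illustrative values).
        Set<String> questionIdsInSession = new HashSet<String>();
        questionIdsInSession.add("q1");
        questionIdsInSession.add("q2");
        questionIdsInSession.add("q3");

        // Question IDs the user has already answered.
        Set<String> answeredQuestionIds = new HashSet<String>();
        answeredQuestionIds.add("q2");

        // Copy first so the original set keeps all of its question IDs.
        Set<String> unanswered = new HashSet<String>(questionIdsInSession);
        unanswered.removeAll(answeredQuestionIds);

        System.out.println("Unanswered: " + unanswered.size());              // 2
        System.out.println("Original size: " + questionIdsInSession.size()); // still 3
    }
}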

From source file:com.aurel.track.fieldType.runtime.base.FieldsManagerRT.java

/**
 * Saves a modified or a new (copied or newly created) item
 * @param workItemContext the context carrying the item, its original state, the person and the locale
 * @param errorsList the list collecting any errors encountered during the save
 * @param isCopy whether it is a copy of an existing item
 * @param withNotify whether to send notification messages to the listeners
 * @return true if a save was needed (the item was created, copied or changed)
 */
public static boolean performSave(WorkItemContext workItemContext, List<ErrorData> errorsList, boolean isCopy,
        boolean withNotify/*, boolean cascadeChanges*/) {
    Set<Integer> presentFields = workItemContext.getPresentFieldIDs();
    TWorkItemBean workItemBean = workItemContext.getWorkItemBean();
    //force the comment field if a comment is present in the item bean
    if (workItemBean.getComment() != null && workItemBean.getComment().length() > 0) {
        presentFields.add(SystemFields.INTEGER_COMMENT);
    }
    TWorkItemBean workItemBeanOriginal = workItemContext.getWorkItemBeanOriginal();
    Integer personID = workItemContext.getPerson();
    Locale locale = workItemContext.getLocale();
    //a non-null fieldChangeID means editing/deleting only a history entry, typically a comment
    Integer fieldChangeID = workItemContext.getFieldChangeID();
    boolean updateLastEdit = workItemContext.isUpdateLastEdit();
    boolean exchangeImport = workItemContext.isExchangeImport();
    //the list of events to be triggered during the save
    List<Integer> events = new LinkedList<Integer>();
    //The item has been changed (except state- and date-change: they are dealt with explicitly)
    boolean isChanged = false;
    //Whether it is a new item
    boolean isCreate = isCreate(workItemBean, isCopy);
    boolean saveNeeded = false;
    Integer workItemID = workItemBean.getObjectID();
    Integer parentID = workItemBean.getSuperiorworkitem();
    boolean archivedOrDeleted = workItemBean.isArchivedOrDeleted();
    Integer originalParentID = null;
    boolean archivedOrDeletedOriginal = false;
    if (workItemBeanOriginal != null) {
        originalParentID = workItemBeanOriginal.getSuperiorworkitem();
        archivedOrDeletedOriginal = workItemBeanOriginal.isArchivedOrDeleted();
    }
    //errors found
    if (!errorsList.isEmpty()) {
        return saveNeeded;
    }
    // Everything seems o.k. to update the workItem and associated tables
    Date now = new Date();
    workItemBean.setChangedByID(personID);
    ErrorData errorData;
    /*---------------------------------------------------------
     * create or copy: check permission for create.
     *---------------------------------------------------------*/
    //set the created timestamp only if not yet preset (for example by excel import it could be set already)
    if (isCopy || (isCreate && workItemBean.getCreated() == null)) {
        workItemBean.setCreated(now);
    }
    /**
     * Set some field values before saving the item: a typical case is the extensible select.
     * It should be called before hasChanged() because it might change the workItemBean.
     */
    processBeforeSave(workItemBean, workItemContext);
    /*-----------------------------------------------------------
     * Save the item
     *-----------------------------------------------------------*/
    isChanged = hasChanged(workItemBean, workItemBeanOriginal, presentFields, null, isCopy);
    if (isCreate || isCopy || isChanged) {
        saveNeeded = true;
        if (updateLastEdit) {
            //editing only a history entry (comment, attachment)
            //should not update the workItem's lastEdit, hence this flag
            workItemBean.setLastEdit(now);
        }
        //set the originator only if not yet preset (for example by excel import it could be set already)
        if (isCopy || (isCreate && workItemBean.getOriginatorID() == null)) {
            workItemBean.setOriginatorID(personID);
        }
        //try to save the workItem to the database
        Integer workItemKey = null;
        try {
            workItemKey = workItemDAO.save(workItemBean);
        } catch (ItemPersisterException e) {
            LOGGER.error("Saving of the workItem failed with " + e.getMessage());
        }
        if (workItemKey == null) {
            //the save failed for some reason (see log files)
            errorData = new ErrorData("item.err.saveFailed", workItemBean.getSynopsis());
            errorsList.add(errorData);
            return saveNeeded;
        }
        workItemBean.setObjectID(workItemKey);
        //if fieldChangeID is not null there is no reason for further
        //save processing because only a history entry changes;
        //the workItem itself was saved only so that the
        //Lucene indexer is called for the new comment
        if (fieldChangeID == null) {
            //save the custom attributes also
            if (isCopy) {
                //pass null as the original workItemBean in order to save all custom attributes
                //(when the values are the same in the original and the new (copied) workItem no database save happens)
                //and all custom fields should be saved, not just those present in the current screen
                saveWorkItemCustomAttributesByCopy(workItemBean, null);
            } else {
                saveWorkItemCustomAttributes(workItemBean, workItemBeanOriginal, presentFields);
            }
            if (exchangeImport) {
                //save only the workItem itself;
                //the attachment and history data are saved in another place
                return saveNeeded;
            }
            boolean haveNewAttachments = false;
            if (isCreate) {
                //move attachments from sessionID temporary directory to issue directory
                List<TAttachmentBean> attachList = workItemContext.getAttachmentsList();
                String sessionID = workItemContext.getSessionID();
                if (sessionID != null && attachList != null && !attachList.isEmpty()) {
                    //save from web interface (not from email submission)
                    List<Integer> attachIDList = AttachBL.approve(attachList, sessionID,
                            workItemBean.getObjectID());
                    if (attachIDList != null && attachIDList.size() == attachList.size()) {
                        AttachBL.replaceInlineImagesDescription(workItemBean.getObjectID(), attachList,
                                attachIDList);
                    }
                    haveNewAttachments = true;
                }
            }
            // move the email attachments from temporary email directory to issue directory
            List<EmailAttachment> emailAttachmentList = workItemContext.getEmailAttachmentList();
            List<Integer> emailAttachmentIDList = null;
            if (emailAttachmentList != null && !emailAttachmentList.isEmpty()) {
                emailAttachmentIDList = AttachBL.storeEmailAttachments(emailAttachmentList,
                        workItemBean.getObjectID());
                if (isCreate) {
                    AttachBL.replaceEmailInlineImagesDescription(workItemBean.getObjectID(),
                            emailAttachmentList, emailAttachmentIDList);
                } else {
                    workItemBean.setComment(AttachBL.replaceInlineImagesTextMail(emailAttachmentList,
                            emailAttachmentIDList, workItemID, workItemBean.getComment()));
                }
                haveNewAttachments = true;
            }
            if (haveNewAttachments) {
                //add the attachments of the workItem to the attachments index
                List<TAttachmentBean> attachments = AttachBL.getAttachments(workItemBean.getObjectID());
                if (attachments != null && !attachments.isEmpty()) {
                    for (TAttachmentBean attachmentBean : attachments) {
                        if (LOGGER.isDebugEnabled()) {
                            LOGGER.debug("Add attachment " + attachmentBean.getObjectID()
                                    + " to the new workItem " + workItemBean.getObjectID());
                        }
                        AttachmentIndexer.getInstance().addToIndex(attachmentBean, true);
                        //possible lucene update in other cluster nodes
                        ClusterMarkChangesBL.markDirtyAttachmentInCluster(attachmentBean.getObjectID(),
                                CHANGE_TYPE.ADD_TO_INDEX);
                    }
                }
            }
        }
    }

    /**
     * Prepare the argument for saving the item
     */
    AfterItemSaveEventParam afterItemSaveEventParam = new AfterItemSaveEventParam();
    afterItemSaveEventParam.setWorkItemNew(workItemBean);
    afterItemSaveEventParam.setWorkItemOld(workItemBeanOriginal);
    afterItemSaveEventParam.setFieldConfigs(FieldRuntimeBL.getFieldConfigsMap(workItemBean.getProjectID(),
            workItemBean.getListTypeID(), locale));
    afterItemSaveEventParam.setLocale(locale);
    //don't forget to set the interesting fields (setInterestingFields()) in the context of
    //the actual operation before calling the getLocalizedFieldChanges()
    Set<Integer> interestingFieldsForHistory = new HashSet<Integer>();
    List<Integer> longFields = getLongFields(presentFields);
    SortedMap<Integer, FieldChange> fieldsChangesMap;
    boolean systemDateChanged = false;
    boolean startDateChanged = false;
    boolean requestedStartDateChanged = false;
    boolean endDateChanged = false;
    boolean requestedEndDateChanged = false;
    boolean parentChanged = false;
    if (isCreate || isCopy) {
        //send create event
        if (isCreate) {
            events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_CREATE));
        }
        //send copy event
        if (isCopy) {
            events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_COPY));
        }
        //for the history text of new/copied items we are only interested in the Comment field
        //(the rare case when the Comment field is present on the create screen)
        interestingFieldsForHistory = new HashSet<Integer>();
        if (isCreate) {
            //create
            interestingFieldsForHistory.add(SystemFields.INTEGER_COMMENT);
        } else {
            //copy: we need mainly the state change for history
            interestingFieldsForHistory = copyPresentFieldsForHistory(presentFields);
            Set<Integer> excludeFields = getExcludeFieldsForHistory();
            interestingFieldsForHistory.removeAll(excludeFields);
        }
        afterItemSaveEventParam.setInterestingFields(interestingFieldsForHistory);
        fieldsChangesMap = HistorySaverBL.getLocalizedFieldChanges(afterItemSaveEventParam, locale, true);
    } else {
        //for the history text for existing items we are interested in the change of any field present
        //in the screen except some special system fields
        interestingFieldsForHistory = copyPresentFieldsForHistory(presentFields);
        Set<Integer> excludeFields = getExcludeFieldsForHistory();
        interestingFieldsForHistory.removeAll(excludeFields);
        afterItemSaveEventParam.setInterestingFields(interestingFieldsForHistory);
        fieldsChangesMap = HistorySaverBL.getLocalizedFieldChanges(afterItemSaveEventParam, locale, true);
        /*--------------------------------------------------------------------------------------------------------------
         * send field specific events: state change, move, date change, assignResponsible, assignManager, addComment, "general" update
         *--------------------------------------------------------------------------------------------------------------*/
        // status change: send events for either "close" or "reopen" or "general status change"
        if (workItemBeanOriginal != null && workItemBean.getStateID() != null
                && workItemBeanOriginal.getStateID() != null
                && workItemBean.getStateID().intValue() != workItemBeanOriginal.getStateID().intValue()) {
            TStateBean stateBeanNew = LookupContainer.getStatusBean(workItemBean.getStateID());
            TStateBean stateBeanOld = LookupContainer.getStatusBean(workItemBeanOriginal.getStateID());
            Integer newStatusFlag = stateBeanNew.getStateflag();
            Integer oldStatusFlag = stateBeanOld.getStateflag();
            if (newStatusFlag.intValue() == 1 && oldStatusFlag.intValue() != 1) {
                //send close event
                events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_CLOSE));
            } else {
                //send reopen event
                if (newStatusFlag.intValue() != 1 && oldStatusFlag.intValue() == 1) {
                    events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_REOPEN));
                }
            }
            //just "ordinary" state change event
            events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_CHANGESTATUS));
            excludeFields.add(SystemFields.INTEGER_STATE);
        }
        //move event
        FieldChange projectChange = fieldsChangesMap.get(SystemFields.INTEGER_PROJECT);
        FieldChange issueTypeChange = fieldsChangesMap.get(SystemFields.INTEGER_ISSUETYPE);
        if (projectChange != null && projectChange.isChanged()
                || issueTypeChange != null && issueTypeChange.isChanged()) {
            events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_MOVE));
            excludeFields.add(SystemFields.INTEGER_PROJECT);
            excludeFields.add(SystemFields.INTEGER_ISSUETYPE);
        }
        //date change event
        FieldChange startDateChange = fieldsChangesMap.get(SystemFields.INTEGER_STARTDATE);
        FieldChange endDateChange = fieldsChangesMap.get(SystemFields.INTEGER_ENDDATE);
        FieldChange requestedStartDateChange = fieldsChangesMap.get(SystemFields.INTEGER_TOP_DOWN_START_DATE);
        FieldChange requestedEndDateChange = fieldsChangesMap.get(SystemFields.INTEGER_TOP_DOWN_END_DATE);
        startDateChanged = startDateChange != null && startDateChange.isChanged();
        endDateChanged = endDateChange != null && endDateChange.isChanged();
        requestedStartDateChanged = requestedStartDateChange != null && requestedStartDateChange.isChanged();
        requestedEndDateChanged = requestedEndDateChange != null && requestedEndDateChange.isChanged();
        if (startDateChanged || endDateChanged || requestedStartDateChanged || requestedEndDateChanged) {
            systemDateChanged = true;
            events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_CHANGEDATE));
            excludeFields.add(SystemFields.INTEGER_STARTDATE);
            excludeFields.add(SystemFields.INTEGER_ENDDATE);
            excludeFields.add(SystemFields.INTEGER_TOP_DOWN_START_DATE);
            excludeFields.add(SystemFields.INTEGER_TOP_DOWN_END_DATE);
        }
        //parent change
        FieldChange parentChange = fieldsChangesMap.get(SystemFields.INTEGER_SUPERIORWORKITEM);
        parentChanged = parentChange != null && parentChange.isChanged();
        //responsible change event
        FieldChange responsibleChange = fieldsChangesMap.get(SystemFields.INTEGER_RESPONSIBLE);
        if (responsibleChange != null && responsibleChange.isChanged()) {
            events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_ASSIGNRESPONSIBLE));
            excludeFields.add(SystemFields.INTEGER_RESPONSIBLE);
        }
        //manager change event
        FieldChange managerChange = fieldsChangesMap.get(SystemFields.INTEGER_MANAGER);
        if (managerChange != null && managerChange.isChanged()) {
            events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_ASSIGNMANAGER));
            excludeFields.add(SystemFields.INTEGER_MANAGER);
        }
        //comment event
        FieldChange commentAddedChange = fieldsChangesMap.get(SystemFields.INTEGER_COMMENT);
        if (commentAddedChange != null) {
            if (fieldChangeID == null) {
                events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_ADDCOMMENT));
            } else {
                String newComment = commentAddedChange.getNewShowValue();
                if (newComment != null && !"".equals(newComment)) {
                    events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_EDITCOMMENT));
                } else {
                    events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_DELETECOMMENT));
                }
            }
            excludeFields.add(SystemFields.INTEGER_COMMENT);
        }
        //attachment add event
        FieldChange attachmentAdd = fieldsChangesMap.get(SystemFields.INTEGER_ATTACHMENT_ADD_HISTORY_FIELD);
        if (attachmentAdd != null && attachmentAdd.isChanged()) {
            events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_ADDATTACHMENT));
            excludeFields.add(SystemFields.INTEGER_ATTACHMENT_ADD_HISTORY_FIELD);
        }
        //attachment modify event
        FieldChange attachmentModify = fieldsChangesMap
                .get(SystemFields.INTEGER_ATTACHMENT_MODIFY_HISTORY_FIELD);
        if (attachmentModify != null && attachmentModify.isChanged()) {
            events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_MODIFYATTACHMENT));
            excludeFields.add(SystemFields.INTEGER_ATTACHMENT_MODIFY_HISTORY_FIELD);
        }
        //attachment remove event
        FieldChange attachmentDelete = fieldsChangesMap
                .get(SystemFields.INTEGER_ATTACHMENT_DELETE_HISTORY_FIELD);
        if (attachmentDelete != null && attachmentDelete.isChanged()) {
            events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_REMOVEATTACHMENT));
            excludeFields.add(SystemFields.INTEGER_ATTACHMENT_DELETE_HISTORY_FIELD);
        }
        //any other field change which was not excluded previously
        if (hasChanged(workItemBean, workItemBeanOriginal, presentFields, excludeFields, isCopy)) {
            events.add(Integer.valueOf(IEventSubscriber.EVENT_POST_ISSUE_UPDATE));
        }
    }
    boolean mightTriggerEmail = HistorySaverBL.saveHistory(afterItemSaveEventParam, locale, personID,
            longFields, isCreate, isCopy, fieldChangeID);
    //save consultants/informants and budgets/efforts/costs for create mode
    //(in edit mode they are saved directly in the database,
    //but in create mode the workItemKey becomes available only now)
    if (isCreate && !isCopy) {
        //save consultants/informants from session to db
        ConsInfShow consInfShow = workItemContext.getConsInfShow();
        if (consInfShow != null) {
            //the cons/inf tab was at least once selected (the consInfShow was initialized)
            RaciRoleBL.saveFromSessionToDb(workItemBean.getObjectID(), consInfShow);
        }
        //save budgets, costs/efforts from session to db
        AccountingForm accountingForm = workItemContext.getAccountingForm();
        if (accountingForm != null) {
            //the accounting tab was at least once selected (the accountingForm was initialized)
            AccountingBL.saveAllFromSessionToDb(accountingForm, workItemContext.getWorkItemBean(),
                    LookupContainer.getPersonBean(personID));
        }
        SortedMap<Integer, TWorkItemLinkBean> workItemsLinksMap = workItemContext.getWorkItemsLinksMap();
        if (workItemsLinksMap != null) {
            ItemLinkBL.saveAllFromSessionToDb(workItemBean.getObjectID(), workItemsLinksMap);
        }
    }
    //adjust the ancestor item's bottom up dates to the dates of this child if it is the earliest/latest
    if (parentChanged || (parentID != null
            && (systemDateChanged || isCreate || isCopy || archivedOrDeletedOriginal != archivedOrDeleted))) {
        /**
         * Possible ancestor bottom up start or end date changes:
         * 1. parent change
         * 2. start or end date changed in leaf issue or create/copy of a new issue
         * 3. archived/deleted flag changed
         */
        Set<Integer> bottomUpFields = null;
        Map<Integer, Object> newValuesMap = new HashMap<Integer, Object>();
        Map<Integer, Object> oldValuesMap = new HashMap<Integer, Object>();
        if (parentChanged) {
            bottomUpFields = FieldRuntimeBL.getPossibleBottomUpFields();
            if (originalParentID != null) {
                //as if the child's dates were set to null for the old parent: recalculate the bottom up dates for the original parent
                //set only the oldValuesMap
                if (workItemBeanOriginal != null) {
                    for (Integer fieldID : bottomUpFields) {
                        oldValuesMap.put(fieldID, workItemBeanOriginal.getAttribute(fieldID));
                    }
                }
                actualizeAncestorBottomUpDate(originalParentID, bottomUpFields, newValuesMap, oldValuesMap,
                        LookupContainer.getPersonBean(personID), locale);
            }
            if (parentID != null) {
                //as if a new child workItem were created for parentID
                //set only the newValuesMap
                for (Integer fieldID : bottomUpFields) {
                    newValuesMap.put(fieldID, workItemBean.getAttribute(fieldID));
                }
                actualizeAncestorBottomUpDate(parentID, bottomUpFields, newValuesMap, oldValuesMap,
                        LookupContainer.getPersonBean(personID), locale);
            }
        } else {
            if (isCreate || isCopy || archivedOrDeletedOriginal != archivedOrDeleted) {
                bottomUpFields = FieldRuntimeBL.getPossibleBottomUpFields();
            } else {
                bottomUpFields = new HashSet<Integer>();
                if (startDateChanged) {
                    bottomUpFields.add(SystemFields.INTEGER_STARTDATE);
                }
                if (endDateChanged) {
                    bottomUpFields.add(SystemFields.INTEGER_ENDDATE);
                }
                if (requestedStartDateChanged) {
                    bottomUpFields.add(SystemFields.INTEGER_TOP_DOWN_START_DATE);
                }
                if (requestedEndDateChanged) {
                    bottomUpFields.add(SystemFields.INTEGER_TOP_DOWN_END_DATE);
                }
            }
            for (Integer fieldID : bottomUpFields) {
                newValuesMap.put(fieldID, workItemBean.getAttribute(fieldID));
                if (workItemBeanOriginal != null) {
                    oldValuesMap.put(fieldID, workItemBeanOriginal.getAttribute(fieldID));
                }
            }
            actualizeAncestorBottomUpDate(parentID, bottomUpFields, newValuesMap, oldValuesMap,
                    LookupContainer.getPersonBean(personID), locale);
        }
    }
    if (ApplicationBean.getInstance().getSiteBean().getSummaryItemsBehavior()) {
        if (parentChanged || archivedOrDeletedOriginal != archivedOrDeleted) {
            /**
             * Possible ancestor planned value changes:
             * 1. parent change
             * 2. archived/deleted flag changed
             */
            AccountingBL.actualizeAncestorValues(workItemBean, parentID, originalParentID, personID);
        }
    }

    //add inline items link
    List<Integer> inlineItems = workItemContext.getInlineItems();
    //Saving inlineItems
    if (inlineItems != null && !inlineItems.isEmpty()) {
        InlineItemLinkBL.saveInlineItemLinks(inlineItems, workItemContext.getWorkItemBean().getObjectID(),
                workItemContext.getRootItemID());
    }

    //send the mails only after the consultants/informants are set because they should also receive the email
    EventPublisher eventPublisher = EventPublisher.getInstance();
    if (eventPublisher != null && withNotify && mightTriggerEmail) {
        afterItemSaveEventParam.setInterestingFields(
                getInterestingFieldsForMail(fieldsChangesMap, workItemBean.getObjectID()));
        eventPublisher.notify(events, afterItemSaveEventParam);
    }
    return saveNeeded;
}
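
In performSave above, removeAll is used to build the set of fields that matter for the history text: the field IDs present on the screen are copied and the excluded system fields are subtracted (interestingFieldsForHistory.removeAll(excludeFields)). A minimal sketch of that exclusion step with made-up field IDs; the SystemFields constants and helper methods of FieldsManagerRT are not reproduced here.

import java.util.HashSet;
import java.util.Set;

public class HistoryFieldFilterSketch {
    public static void main(String[] args) {
        // Field IDs present on the current screen (illustrative values only).
        Set<Integer> presentFields = new HashSet<Integer>();
        presentFields.add(10); // e.g. a state-like field
        presentFields.add(11); // e.g. a responsible-like field
        presentFields.add(99); // e.g. an internal field that should not show up in history

        // Fields to exclude from the history text.
        Set<Integer> excludeFields = new HashSet<Integer>();
        excludeFields.add(99);

        // Copy, then subtract the exclusions; presentFields itself is not modified.
        Set<Integer> interestingFieldsForHistory = new HashSet<Integer>(presentFields);
        interestingFieldsForHistory.removeAll(excludeFields);

        System.out.println(interestingFieldsForHistory); // [10, 11]
    }
}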