Example usage for java.util HashSet remove

List of usage examples for java.util HashSet remove

Introduction

On this page you can find usage examples for java.util.HashSet.remove.

Prototype

public boolean remove(Object o) 

Document

Removes the specified element from this set if it is present.
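
The boolean return value tells you whether the set actually contained the element: remove returns true if the element was present (and the set was modified), and false otherwise. Below is a minimal, self-contained sketch of that contract; the class name and values are made up purely for illustration.

import java.util.Arrays;
import java.util.HashSet;

public class HashSetRemoveBasics {
    public static void main(String[] args) {
        HashSet<String> colors = new HashSet<>(Arrays.asList("red", "green", "blue"));

        // remove() returns true when the element was present and has been removed
        System.out.println(colors.remove("green")); // true
        // a second remove of the same element finds nothing and returns false
        System.out.println(colors.remove("green")); // false
        // removing an element that was never in the set also returns false
        System.out.println(colors.remove("purple")); // false

        System.out.println(colors); // [red, blue] (iteration order is not guaranteed)
    }
}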

Usage

From source file: com.ikanow.aleph2.analytics.services.TestAnalyticsContext.java

@Test
public void test_streamingPipeline() throws JsonProcessingException, IOException, InterruptedException {
    _logger.info("running test_streamingPipeline");

    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());

    final AnalyticsContext test_context = _app_injector.getInstance(AnalyticsContext.class);

    final AnalyticThreadJobBean.AnalyticThreadJobInputBean analytic_input1 = BeanTemplateUtils
            .build(AnalyticThreadJobBean.AnalyticThreadJobInputBean.class)
            .with(AnalyticThreadJobBean.AnalyticThreadJobInputBean::data_service, "search_index_service").done()
            .get();
    final AnalyticThreadJobBean.AnalyticThreadJobInputBean analytic_input2 = BeanTemplateUtils
            .build(AnalyticThreadJobBean.AnalyticThreadJobInputBean.class)
            .with(AnalyticThreadJobBean.AnalyticThreadJobInputBean::data_service, "storage_service").done()
            .get();

    final AnalyticThreadJobBean.AnalyticThreadJobOutputBean analytic_output = BeanTemplateUtils
            .build(AnalyticThreadJobBean.AnalyticThreadJobOutputBean.class)
            .with(AnalyticThreadJobBean.AnalyticThreadJobOutputBean::transient_type,
                    MasterEnrichmentType.streaming)
            .with(AnalyticThreadJobBean.AnalyticThreadJobOutputBean::is_transient, true).done().get();

    final AnalyticThreadJobBean.AnalyticThreadJobOutputBean analytic_output_no_streaming = BeanTemplateUtils
            .build(AnalyticThreadJobBean.AnalyticThreadJobOutputBean.class)
            .with(AnalyticThreadJobBean.AnalyticThreadJobOutputBean::is_transient, true).done().get();

    final AnalyticThreadJobBean analytic_job1 = BeanTemplateUtils.build(AnalyticThreadJobBean.class)
            .with(AnalyticThreadJobBean::name, "test1")
            .with(AnalyticThreadJobBean::analytic_technology_name_or_id, "test_analytic_tech_id")
            .with(AnalyticThreadJobBean::inputs, Arrays.asList(analytic_input1, analytic_input2))
            .with(AnalyticThreadJobBean::output, analytic_output)
            .with(AnalyticThreadJobBean::library_names_or_ids, Arrays.asList("id1", "name2")).done().get();

    final DataBucketBean test_bucket = BeanTemplateUtils.build(DataBucketBean.class)
            .with(DataBucketBean::_id, "test").with(DataBucketBean::full_name, "/TEST/ANALYICS/CONTEXT")
            .with(DataBucketBean::analytic_thread,
                    BeanTemplateUtils.build(AnalyticThreadBean.class)
                            .with(AnalyticThreadBean::jobs, Arrays.asList(analytic_job1)).done().get())
            .with("data_schema", BeanTemplateUtils.build(DataSchemaBean.class)
                    .with("search_index_schema",
                            BeanTemplateUtils.build(DataSchemaBean.SearchIndexSchemaBean.class).done().get())
                    .done().get())
            .done().get();

    final SharedLibraryBean library = BeanTemplateUtils.build(SharedLibraryBean.class)
            .with(SharedLibraryBean::path_name, "/test/lib").done().get();
    test_context.setTechnologyConfig(library);

    test_context._distributed_services.createTopic(
            BucketUtils.getUniqueSignature("/TEST/ANALYICS/CONTEXT", Optional.of("test1")), Optional.empty());

    String message1 = "{\"key\":\"val\"}";
    String message2 = "{\"key\":\"val2\"}";
    String message3 = "{\"key\":\"val3\"}";
    String message4 = "{\"key\":\"val4\"}";
    Map<String, Object> msg3 = ImmutableMap.<String, Object>builder().put("key", "val3").build();
    Map<String, Object> msg4 = ImmutableMap.<String, Object>builder().put("key", "val4").build();
    //currently mock cds produce does nothing
    try {
        test_context.sendObjectToStreamingPipeline(Optional.empty(), analytic_job1,
                Either.left(mapper.readTree(message1)), Optional.empty());
        fail("Should fail, bucket not set and not specified");
    } catch (Exception e) {
    }
    test_context.setBucket(test_bucket);
    assertEquals(test_bucket, test_context.getBucket().get());
    Iterator<String> iter = test_context._distributed_services.consumeAs(
            BucketUtils.getUniqueSignature("/TEST/ANALYICS/CONTEXT", Optional.of("test1")), Optional.empty(),
            Optional.empty());

    test_context.sendObjectToStreamingPipeline(Optional.empty(), analytic_job1,
            Either.left(mapper.readTree(message1)), Optional.empty());
    test_context.sendObjectToStreamingPipeline(Optional.of(test_bucket), analytic_job1,
            Either.left(mapper.readTree(message2)), Optional.empty());
    test_context.sendObjectToStreamingPipeline(Optional.empty(), analytic_job1, Either.right(msg3),
            Optional.empty());
    test_context.sendObjectToStreamingPipeline(Optional.of(test_bucket), analytic_job1, Either.right(msg4),
            Optional.empty());

    //(just send a quick message out on a different job name so it will fail silently)
    test_context.sendObjectToStreamingPipeline(Optional.of(test_bucket),
            BeanTemplateUtils.clone(analytic_job1).with("name", "different").done(), Either.right(msg4),
            Optional.empty());
    //(just send a quick message out with streaming turned off so it will fail silently)
    test_context.sendObjectToStreamingPipeline(Optional.of(test_bucket),
            BeanTemplateUtils.clone(analytic_job1).with("output", analytic_output_no_streaming).done(),
            Either.right(msg4), Optional.empty());

    try {
        test_context.sendObjectToStreamingPipeline(Optional.empty(), analytic_job1,
                Either.left(mapper.readTree(message1)),
                Optional.of(BeanTemplateUtils.build(AnnotationBean.class).done().get()));
        fail("Should fail, annotation specified");
    } catch (Exception e) {
    }

    final HashSet<String> mutable_set = new HashSet<>(Arrays.asList(message1, message2, message3, message4));

    //nothing will be in consume
    Thread.sleep(5000); //wait a few seconds for producers to dump batch

    long count = 0;
    while (iter.hasNext()) {
        String msg = iter.next();
        assertTrue("Sent this message: " + msg, mutable_set.remove(msg));
        count++;
    }
    assertEquals(4, count);
}
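
The check at the end of this test, removing each consumed message from a pre-built HashSet of expected payloads, is a compact way to assert that every message arrives exactly once: a remove that returns false flags either an unexpected or a duplicated message, and anything left in the set afterwards was never received. A stripped-down, hypothetical sketch of the same idea (the received list stands in for the consumeAs iterator used in the test above):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class ExactlyOnceCheck {
    public static void main(String[] args) {
        // the expected payloads, analogous to message1..message4 in the test above
        HashSet<String> expected = new HashSet<>(Arrays.asList(
                "{\"key\":\"val\"}", "{\"key\":\"val2\"}", "{\"key\":\"val3\"}", "{\"key\":\"val4\"}"));

        // stand-in for the consumed stream; in the test this comes from the topic iterator
        List<String> received = Arrays.asList(
                "{\"key\":\"val\"}", "{\"key\":\"val2\"}", "{\"key\":\"val3\"}", "{\"key\":\"val4\"}");

        for (String msg : received) {
            // remove() returning false means the message is either unexpected or a duplicate
            if (!expected.remove(msg)) {
                throw new AssertionError("unexpected or duplicate message: " + msg);
            }
        }
        // anything left over was expected but never received
        if (!expected.isEmpty()) {
            throw new AssertionError("missing messages: " + expected);
        }
        System.out.println("all messages received exactly once");
    }
}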

From source file: org.kuali.kfs.sys.context.DataDictionaryConfigurationTest.java

public void testAllDataDictionaryDocumentTypesExistInWorkflowDocumentTypeTable() throws Exception {
    HashSet<String> workflowDocumentTypeNames = new HashSet<String>();
    DataSource mySource = SpringContext.getBean(DataSource.class);
    Connection dbCon = null;
    try {

        dbCon = mySource.getConnection();
        Statement dbAsk = dbCon.createStatement();
        ResultSet dbAnswer = dbAsk.executeQuery("select DOC_TYP_NM from KREW_DOC_TYP_T where CUR_IND = 1");
        while (dbAnswer.next()) {
            String docName = dbAnswer.getString(1);
            if (StringUtils.isNotBlank(docName)) {
                workflowDocumentTypeNames.add(docName);
            }
        }

    } catch (Exception e) {
        throw (e);
    }
    // Using HashSet since duplicate objects would otherwise be returned
    HashSet<DocumentEntry> documentEntries = new HashSet<DocumentEntry>(
            dataDictionary.getDocumentEntries().values());
    List<String> ddEntriesWithMissingTypes = new ArrayList<String>();
    for (DocumentEntry documentEntry : documentEntries) {
        String name = documentEntry.getDocumentTypeName();
        String testName = new String(" ");
        if (documentEntry instanceof FinancialSystemMaintenanceDocumentEntry) {
            testName = ((FinancialSystemMaintenanceDocumentEntry) documentEntry).getBusinessObjectClass()
                    .getName();
        } else {
            testName = documentEntry.getDocumentClass().getName();
        }
        if (!workflowDocumentTypeNames.contains(name) && !"RiceUserMaintenanceDocument".equals(name)
                && !testName.contains("rice")) {
            ddEntriesWithMissingTypes.add(name);
        } else {
            workflowDocumentTypeNames.remove(name);
        }
    }

    if (workflowDocumentTypeNames.size() > 0) {
        try {
            //If documents are parent docs, then they aren't superfluous.
            String queryString = "select distinct doc_typ_nm from KREW_DOC_TYP_T"
                    + " where doc_typ_id in (select parnt_id from KREW_DOC_TYP_T" + " where actv_ind = 1"
                    + " and cur_ind = 1)";
            Statement dbAsk = dbCon.createStatement();
            ResultSet dbAnswer = dbAsk.executeQuery(queryString);
            while (dbAnswer.next()) {
                String docName = dbAnswer.getString(1);
                if (StringUtils.isNotBlank(docName)) {
                    workflowDocumentTypeNames.remove(docName);
                }
            }
        } catch (Exception e) {
            throw (e);
        }

        System.err.print("superfluousTypesDefinedInWorkflowDatabase: " + workflowDocumentTypeNames);
    }
    assertEquals("documentTypesNotDefinedInWorkflowDatabase: " + ddEntriesWithMissingTypes, 0,
            ddEntriesWithMissingTypes.size());
}
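
The reconciliation idiom in this test is also worth noting: load one side into a HashSet, walk the other side, and remove each match. Whatever remains in the set afterwards exists only on the first side, and every name whose remove returned false exists only on the second. A small, self-contained sketch of that two-way diff follows; the class name and document type names are invented for illustration.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class TwoWayDiff {
    public static void main(String[] args) {
        // e.g. document type names found in the workflow database
        HashSet<String> onlyInDatabase = new HashSet<>(Arrays.asList("DocA", "DocB", "DocC"));
        // e.g. document type names declared in the data dictionary
        List<String> inDictionary = Arrays.asList("DocA", "DocC", "DocD");

        List<String> onlyInDictionary = new ArrayList<>();
        for (String name : inDictionary) {
            // remove() doubles as a containment check: false means the database never had this name
            if (!onlyInDatabase.remove(name)) {
                onlyInDictionary.add(name);
            }
        }

        System.out.println("only in the dictionary: " + onlyInDictionary); // [DocD]
        System.out.println("only in the database:   " + onlyInDatabase);   // [DocB]
    }
}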

From source file: org.apache.hadoop.hive.ql.parse.CommonSubtreeDetect.java

private void removeCommonPart(HashSet<List<Object>> commonList) {
    HashSet<List<Object>> commonl = new HashSet<List<Object>>();
    Iterator<List<Object>> listIter = commonList.iterator();
    while (listIter.hasNext()) {
        List<Object> l = listIter.next();
        Iterator<List<Object>> tmpIter = commonList.iterator();
        while (tmpIter.hasNext()) {
            List<Object> tmpl = tmpIter.next();
            if (!l.equals(tmpl) && l.containsAll(tmpl)) {
                commonl.add(tmpl);
            }
        }
        //remove list that don't include operator
        boolean visitedOp = false;
        for (int i = 0; i < l.size(); i++) {
            if (l.get(i) instanceof Operator<?>) {
                visitedOp = true;
                break;
            }
        }
        if (visitedOp == false) {
            listIter.remove();
            continue;
        }
    }

    //merge common sub optree
    for (List<Object> list : commonl) {
        commonList.remove(list);
    }
}
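
One detail this example handles carefully: while the outer loop is still iterating over commonList, removals go either through listIter.remove() or are deferred by collecting the elements into commonl and removing them after the loop ends. Calling commonList.remove(...) directly inside an active iteration would typically throw ConcurrentModificationException. Below is a minimal sketch of both safe approaches, with hypothetical class and variable names.

import java.util.HashSet;
import java.util.Iterator;

public class SafeRemovalDuringIteration {
    public static void main(String[] args) {
        HashSet<Integer> numbers = new HashSet<>();
        for (int i = 0; i < 10; i++) {
            numbers.add(i);
        }

        // 1) in-place removal through the iterator while iterating
        Iterator<Integer> it = numbers.iterator();
        while (it.hasNext()) {
            if (it.next() % 2 == 0) {
                it.remove(); // calling numbers.remove(...) here instead would risk ConcurrentModificationException
            }
        }

        // 2) deferred removal: collect the candidates first, remove them after the loop
        HashSet<Integer> toRemove = new HashSet<>();
        for (Integer n : numbers) {
            if (n > 5) {
                toRemove.add(n);
            }
        }
        numbers.removeAll(toRemove); // or a loop of numbers.remove(n), as in the example above

        System.out.println(numbers); // contains 1, 3, 5 (iteration order is not guaranteed)
    }
}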

From source file: com.zimbra.perf.chart.ChartUtil.java

private void doit() throws Exception {
    List<ChartSettings> allSettings = getAllChartSettings(mConfs);
    readCsvFiles();
    List<ChartSettings> outDocSettings = new ArrayList<ChartSettings>();
    HashSet<String> outDocNames = new HashSet<String>();
    for (Iterator<ChartSettings> i = allSettings.iterator(); i.hasNext();) {
        ChartSettings cs = i.next();
        mCharts.addAll(createJFReeChart(cs));
        if (cs.getOutDocument() == null || cs.getGroupPlots().size() == 0)
            computeAggregates(cs, mAggregateStartAt, mAggregateEndAt);
        else if (cs.getOutDocument() != null) {
            outDocSettings.add(cs);
            outDocNames.add(cs.getOutDocument());
            i.remove();
        }
    }
    for (ChartSettings cs : mSyntheticChartSettings) {
        computeAggregates(cs, mAggregateStartAt, mAggregateEndAt);
        outDocNames.add(cs.getOutDocument());
    }
    outDocSettings.addAll(mSyntheticChartSettings);
    outDocNames.remove(null); // lazy, instead of checking for null above

    lineUpAxes();
    writeAllCharts(allSettings, outDocNames);
    writeOutDocCharts(mSyntheticChartSettings, outDocNames);
    if (!mSkipSummary)
        writeSummary(allSettings);
}

From source file: org.alfresco.web.forms.FormInstanceDataImpl.java

public List<FormInstanceData.RegenerateResult> regenerateRenditions() throws FormNotFoundException {
    if (logger.isDebugEnabled()) {
        logger.debug("regenerating renditions of " + this);
    }

    AVMLockingService avmLockService = this.getServiceRegistry().getAVMLockingService();
    final AVMService avmService = this.getServiceRegistry().getAVMService();
    PropertyValue pv = avmService.getNodeProperty(AVMNodeConverter.ToAVMVersionPath(this.nodeRef).getFirst(),
            AVMNodeConverter.ToAVMVersionPath(this.nodeRef).getSecond(), WCMAppModel.PROP_ORIGINAL_PARENT_PATH);

    String originalParentAvmPath = (pv == null) ? AVMNodeConverter.SplitBase(this.getPath())[0]
            : pv.getStringValue();

    final HashSet<RenderingEngineTemplate> allRets = new HashSet<RenderingEngineTemplate>(
            this.getForm().getRenderingEngineTemplates());
    final List<RegenerateResult> result = new LinkedList<RegenerateResult>();
    // regenerate existing renditions
    String path = null;

    for (final Rendition r : this.getRenditions()) {
        // Try to skip renditions without rendering engine template.
        if (r instanceof RenditionImpl) {
            RenditionImpl rImpl = (RenditionImpl) r;
            RenderingEngineTemplate ret = rImpl.getRenderingEngineTemplate();
            if ((ret != null) && (ret instanceof RenderingEngineTemplateImpl)) {
                RenderingEngineTemplateImpl retImpl = (RenderingEngineTemplateImpl) ret;
                if (!retImpl.isExists()) {
                    continue;
                }
            }

        }
        final RenderingEngineTemplate ret = r.getRenderingEngineTemplate();
        if (ret == null || !allRets.contains(ret)) {
            continue;
        }

        String lockOwner = null;
        try {
            if (logger.isDebugEnabled()) {
                logger.debug("regenerating rendition " + r + " using template " + ret);
            }

            path = r.getPath();
            lockOwner = avmLockService.getLockOwner(AVMUtil.getStoreId(path),
                    AVMUtil.getStoreRelativePath(path));
            if (lockOwner != null) {
                if (logger.isDebugEnabled()) {
                    logger.debug("Lock already exists for " + path);
                }
            }

            ret.render(this, r);
            allRets.remove(ret);
            result.add(new RegenerateResult(ret, path, r, lockOwner));
        } catch (Exception e) {
            result.add(new RegenerateResult(ret, path, e, lockOwner));

            // remove lock if there wasn't one before
            if (lockOwner == null) {
                avmLockService.removeLock(AVMUtil.getStoreId(path), AVMUtil.getStoreRelativePath(path));

                if (logger.isDebugEnabled()) {
                    logger.debug("Removed lock for " + path + " as it failed to generate");
                }
            }
        }
    }

    // get current username for lock checks
    String username = Application.getCurrentUser(FacesContext.getCurrentInstance()).getUserName();

    // render all renditions for newly added templates
    for (final RenderingEngineTemplate ret : allRets) {
        String lockOwner = null;
        String currentLockStore = null;
        boolean lockModified = false;

        try {
            path = ret.getOutputPathForRendition(this, originalParentAvmPath,
                    getName().replaceAll("(.+)\\..*", "$1"));

            if (logger.isDebugEnabled()) {
                logger.debug("regenerating rendition of " + this.getPath() + " at " + path + " using template "
                        + ret);
            }

            String storeId = AVMUtil.getStoreId(path);
            String storePath = AVMUtil.getStoreRelativePath(path);
            String storeName = AVMUtil.getStoreName(path);

            Map<String, String> lockData = avmLockService.getLockData(storeId, storePath);
            if (lockData != null) {
                lockOwner = avmLockService.getLockOwner(storeId, storePath);
                currentLockStore = lockData.get(WCMUtil.LOCK_KEY_STORE_NAME);
            }

            if (lockOwner != null) {
                if (logger.isDebugEnabled()) {
                    logger.debug("Lock already exists for " + path);
                }

                if (currentLockStore.equals(storeName) == false) {
                    if (lockOwner.equals(username)) {
                        lockModified = true;

                        // lock already exists on path, check it's owned by the current user
                        if (logger.isDebugEnabled()) {
                            logger.debug("transferring lock from " + currentLockStore + " to " + storeName
                                    + " for path: " + path);
                        }

                        lockData.put(WCMUtil.LOCK_KEY_STORE_NAME, storeName);
                        avmLockService.modifyLock(storeId, storePath, lockOwner, storeId, storePath, lockData);
                    }
                }
            }

            result.add(new RegenerateResult(ret, path, ret.render(this, path), lockOwner));
        } catch (Exception e) {
            result.add(new RegenerateResult(ret, path, e, lockOwner));

            String storeId = AVMUtil.getStoreId(path);
            String storePath = AVMUtil.getStoreRelativePath(path);
            String storeName = AVMUtil.getStoreName(path);

            if (lockOwner == null) {
                // remove lock if there wasn't one before
                avmLockService.removeLock(storeId, storePath);

                if (logger.isDebugEnabled()) {
                    logger.debug("Removed lock for " + path + " as it failed to generate");
                }
            } else if (lockModified) {
                if (logger.isDebugEnabled()) {
                    logger.debug("transferring lock from " + storeName + " to " + currentLockStore
                            + " for path: " + path);
                }

                Map<String, String> lockData = avmLockService.getLockData(storeId, storePath);
                lockData.put(WCMUtil.LOCK_KEY_STORE_NAME, currentLockStore);
                avmLockService.modifyLock(storeId, storePath, lockOwner, storeId, storePath, lockData);
            }
        }
    }
    return result;
}

From source file: org.openanzo.datasource.nodecentric.internal.NodeCentricDatasource.java

private boolean checkIfTablesExists(Connection connection, boolean none) throws AnzoException {
    ResultSet rs = null;
    try {
        long currentVersion = none ? SCHEMA_VERSION : getCurrentVersion(connection);

        boolean tables = true;
        boolean sequences = false;
        boolean views = false;
        try {
            rs = connection.getMetaData().getTableTypes();
            while (rs.next() && (!tables || !sequences || !views)) {
                String type = rs.getString(1);
                if (type.toUpperCase().equals(table)) {
                    tables = true;
                } else if (type.toUpperCase().equals(seq)) {
                    sequences = true;
                } else if (type.toUpperCase().equals(view)) {
                    views = true;
                }
            }
        } finally {
            if (rs != null) {
                rs.close();
            }
        }
        if (tables) {
            try {
                rs = connection.getMetaData().getTables(null, null, null, new String[] { table });

                HashSet<String> requiredTables = new HashSet<String>();
                requiredTables.add(serverUpper);
                java.util.Collections.addAll(requiredTables, resetService.getRequiredTables());
                java.util.Collections.addAll(requiredTables, resetService.getNodeCentricTables());
                while (rs.next()) {
                    String tbl = rs.getString(3);
                    if (requiredTables.remove(tbl.toUpperCase()) && none) {
                        throw new AnzoException(ExceptionConstants.RDB.INCOMPLETE_DATABASE);
                    }
                    if (tbl.toUpperCase().equals("ANZO_U")) {
                        ResultSet metadata = connection.getMetaData().getColumns(null, null, tbl, null);
                        while (metadata.next()) {
                            String name = metadata.getString(4);
                            if (name.toUpperCase().equals("VALUE")) {
                                int size = metadata.getInt(7);
                                configuration.setMaxLongObjectLength(size);
                                nodeLayout.setMaxLength(size);
                                break;
                            }
                        }
                    }
                }
                if (!none && requiredTables.size() > 0) {
                    throw new AnzoException(ExceptionConstants.RDB.FAILED_GETTING_TABLE_STATUS,
                            Arrays.toString(requiredTables.toArray()));
                }
            } finally {
                if (rs != null) {
                    rs.close();
                }
            }
        }
        if (sequences) {
            String seqs[][] = resetService.getRequiredSequences();
            for (int i = 0; i < currentVersion; i++) {
                String vseq[] = seqs[i];
                if (vseq != null && vseq.length > 0) {
                    try {
                        rs = connection.getMetaData().getTables(null, null, null, new String[] { seq });
                        HashSet<String> requiredSeq = new HashSet<String>();
                        java.util.Collections.addAll(requiredSeq, vseq);
                        while (rs.next()) {
                            String tbl = rs.getString(3);
                            if (requiredSeq.remove(tbl.toUpperCase()) && none) {
                                throw new AnzoException(ExceptionConstants.RDB.INCOMPLETE_DATABASE);
                            }
                        }
                        if (!none && requiredSeq.size() > 0) {
                            throw new AnzoException(ExceptionConstants.RDB.FAILED_GETTING_TABLE_STATUS,
                                    Arrays.toString(requiredSeq.toArray()));
                        }
                    } finally {
                        if (rs != null) {
                            rs.close();
                        }
                    }
                }
            }
        }
        if (views) {
            try {
                rs = connection.getMetaData().getTables(null, null, null, new String[] { view });

                HashSet<String> required = new HashSet<String>();
                if (currentVersion < 12) {
                    required.add("ALL_STMTS_VIEW");
                } else {
                    java.util.Collections.addAll(required, resetService.getRequiredViews());
                }
                while (rs.next()) {
                    String tbl = rs.getString(3);
                    if (required.remove(tbl.toUpperCase()) && none) {
                        throw new AnzoException(ExceptionConstants.RDB.INCOMPLETE_DATABASE);
                    }
                }
                if (!none && required.size() > 0) {
                    throw new AnzoException(ExceptionConstants.RDB.FAILED_GETTING_TABLE_STATUS,
                            Arrays.toString(required.toArray()));
                }
            } finally {
                if (rs != null) {
                    rs.close();
                }
            }
        }
    } catch (SQLException e) {
        log.error(LogUtils.RDB_MARKER, "Error checking if statements exist", e);
        throw new AnzoException(ExceptionConstants.RDB.FAILED_INITIALZE_DB, e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                log.debug(LogUtils.RDB_MARKER, "Error closing result set", e);

            }
        }
    }
    return true;
}

From source file: de.unijena.bioinf.FragmentationTreeConstruction.computation.FragmentationPatternAnalysis.java

/**
 * Step 6: Decomposition
 * Decompose each peak as well as the parent peak
 */
public ProcessedInput performDecomposition(ProcessedInput input) {
    final FormulaConstraints constraints = input.getMeasurementProfile().getFormulaConstraints();
    final Ms2Experiment experiment = input.getExperimentInformation();
    final Deviation parentDeviation = input.getMeasurementProfile().getAllowedMassDeviation();
    // sort again...
    final ArrayList<ProcessedPeak> processedPeaks = new ArrayList<ProcessedPeak>(input.getMergedPeaks());
    Collections.sort(processedPeaks, new ProcessedPeak.MassComparator());
    final ProcessedPeak parentPeak = processedPeaks.get(processedPeaks.size() - 1);
    // decompose peaks
    final PeakAnnotation<DecompositionList> decompositionList = input
            .getOrCreatePeakAnnotation(DecompositionList.class);
    final MassToFormulaDecomposer decomposer = decomposers.getDecomposer(constraints.getChemicalAlphabet());
    final Ionization ion = experiment.getPrecursorIonType().getIonization();
    final Deviation fragmentDeviation = input.getMeasurementProfile().getAllowedMassDeviation();
    final List<MolecularFormula> pmds = decomposer.decomposeToFormulas(
            experiment.getPrecursorIonType().subtractIonAndAdduct(parentPeak.getOriginalMz()), parentDeviation,
            constraints);
    // add adduct to molecular formula of the ion - because the adduct might get lost during fragmentation
    {
        final MolecularFormula adduct = experiment.getPrecursorIonType().getAdduct();
        final ListIterator<MolecularFormula> iter = pmds.listIterator();
        while (iter.hasNext()) {
            final MolecularFormula f = iter.next();
            iter.set(f.add(adduct));
        }
    }
    decompositionList.set(parentPeak, DecompositionList.fromFormulas(pmds));
    int j = 0;
    for (ProcessedPeak peak : processedPeaks.subList(0, processedPeaks.size() - 1)) {
        peak.setIndex(j++);
        decompositionList.set(peak, DecompositionList.fromFormulas(
                decomposer.decomposeToFormulas(peak.getUnmodifiedMass(), fragmentDeviation, constraints)));
    }
    parentPeak.setIndex(processedPeaks.size() - 1);
    assert parentPeak == processedPeaks.get(processedPeaks.size() - 1);
    // important: for each two peaks which are within 2*massrange:
    //  => make decomposition list disjoint
    final Deviation window = fragmentDeviation.multiply(2);
    for (int i = 1; i < processedPeaks.size() - 1; ++i) {
        if (window.inErrorWindow(processedPeaks.get(i).getMz(), processedPeaks.get(i - 1).getMz())) {
            final HashSet<MolecularFormula> right = new HashSet<MolecularFormula>(
                    decompositionList.get(processedPeaks.get(i)).getFormulas());
            final ArrayList<MolecularFormula> left = new ArrayList<MolecularFormula>(
                    decompositionList.get(processedPeaks.get(i - 1)).getFormulas());
            final double leftMass = ion.subtractFromMass(processedPeaks.get(i - 1).getMass());
            final double rightMass = ion.subtractFromMass(processedPeaks.get(i).getMass());
            final Iterator<MolecularFormula> leftIter = left.iterator();
            while (leftIter.hasNext()) {
                final MolecularFormula leftFormula = leftIter.next();
                if (right.contains(leftFormula)) {
                    if (Math.abs(leftFormula.getMass() - leftMass) < Math
                            .abs(leftFormula.getMass() - rightMass)) {
                        right.remove(leftFormula);
                    } else {
                        leftIter.remove();
                    }
                }
            }
            decompositionList.set(processedPeaks.get(i - 1), DecompositionList.fromFormulas(left));
            decompositionList.set(processedPeaks.get(i), DecompositionList.fromFormulas(right));
        }
    }

    return postProcess(PostProcessor.Stage.AFTER_DECOMPOSING, input);
}

From source file: sh.isaac.convert.rxnorm.standard.RxNormMojo.java

/**
 * If sabList is null or empty, no sab filtering is done.
 *
 * @throws Exception the exception
 */
private void init() throws Exception {
    clearTargetFiles();

    final String fileNameDatePortion = loadDatabase();
    final SimpleDateFormat sdf = new SimpleDateFormat("MMddyyyy");
    final long defaultTime = sdf.parse(fileNameDatePortion).getTime();

    this.abbreviationExpansions = AbbreviationExpansion
            .load(getClass().getResourceAsStream("/RxNormAbbreviationsExpansions.txt"));
    this.mapToIsa.put("isa", false);
    this.mapToIsa.put("inverse_isa", false);

    // not translating this one to isa for now
    // mapToIsa.add("CHD");
    this.mapToIsa.put("tradename_of", false);
    this.mapToIsa.put("has_tradename", false);

    // Cleanup the sabsToInclude list
    final HashSet<String> temp = new HashSet<>();

    if (this.sabsToInclude != null) {
        this.sabsToInclude.forEach((s) -> {
            temp.add(s.toUpperCase());
        });
    }

    temp.add("RXNORM");

    if (temp.contains(this.sctSab)) {
        this.linkSnomedCT = true;
        temp.remove(this.sctSab);
    } else {
        this.linkSnomedCT = false;
    }

    this.sabsToInclude = new ArrayList<>();
    this.sabsToInclude.addAll(temp);
    new File(this.inputFileLocation, "ibdf").listFiles((FileFilter) pathname -> {
        return RxNormMojo.this.linkSnomedCT && pathname.isFile()
                && pathname.getName().toLowerCase().endsWith(".ibdf");
    });
    this.importUtil = new IBDFCreationUtility(Optional.empty(), Optional.of(MetaData.RXNORM_MODULES____SOLOR),
            this.outputDirectory, this.converterOutputArtifactId, this.converterOutputArtifactVersion,
            this.converterOutputArtifactClassifier, false, defaultTime);
    this.metaDataRoot = ComponentReference.fromConcept(
            this.importUtil.createConcept("RxNorm Metadata" + IBDFCreationUtility.METADATA_SEMANTIC_TAG, true,
                    MetaData.SOLOR_CONTENT_METADATA____SOLOR.getPrimordialUuid()));
    loadMetaData();
    this.importUtil.loadTerminologyMetadataAttributes(this.metaDataRoot, this.converterSourceArtifactVersion,
            Optional.of(fileNameDatePortion), this.converterOutputArtifactVersion,
            Optional.ofNullable(this.converterOutputArtifactClassifier), this.converterVersion);
    ConsoleUtil.println("Metadata Statistics");
    this.importUtil.getLoadStats().getSummary().forEach((s) -> {
        ConsoleUtil.println(s);
    });
    this.importUtil.clearLoadStats();
    this.satRelStatement = this.db.getConnection().prepareStatement("select * from " + this.tablePrefix
            + "SAT where RXAUI" + "= ? and STYPE='RUI' and " + createSabQueryPart("", this.linkSnomedCT));
    this.hasTTYType = this.db.getConnection()
            .prepareStatement("select count (*) as count from RXNCONSO where rxcui=? and TTY=? and "
                    + createSabQueryPart("", this.linkSnomedCT));

    if (this.linkSnomedCT) {
        prepareSCTMaps();
    }
}

From source file: com.vmware.bdd.manager.ClusterManager.java

private void checkExtraRequiredPackages() {
    logger.info("check if extra required packages(mailx and wsdl4j) have been installed for Ironfan.");
    if (!extraPackagesExisted) {
        File yumRepoPath = new File(Constants.SERENGETI_YUM_REPO_PATH);

        // use hs to record the packages that have not been added
        final HashSet<String> hs = new HashSet<String>();
        hs.addAll(extraRequiredPackages);

        // scan the files under the serengeti yum repo directory
        File[] rpmList = yumRepoPath.listFiles(new FileFilter() {
            @Override
            public boolean accept(File f) {
                String fname = f.getName();
                int idx = fname.indexOf("-");

                if (idx > 0) {
                    String packName = fname.substring(0, idx);
                    if (extraRequiredPackages.contains(packName)) {
                        String regx = packName + commRegex;
                        Pattern pat = Pattern.compile(regx);
                        if (pat.matcher(fname).matches()) {
                            hs.remove(packName);
                            return true;
                        }
                    }
                }
                return false;
            }
        });

        if (!hs.isEmpty()) {
            logger.info("cannot find all the needed packages, stop and return error now. ");
            throw BddException.EXTRA_PACKAGES_NOT_FOUND(hs.toString());
        }

        logger.info("the check is successful: all needed packages are there.");
        extraPackagesExisted = true;
    }
}

From source file: org.apache.hadoop.hbase.master.procedure.TestMasterProcedureSchedulerConcurrency.java

/**
 * Verify that "write" operations for a single table are serialized,
 * but different tables can be executed in parallel.
 */
@Test(timeout = 60000)
public void testConcurrentWriteOps() throws Exception {
    final TestTableProcSet procSet = new TestTableProcSet(queue);

    final int NUM_ITEMS = 10;
    final int NUM_TABLES = 4;
    final AtomicInteger opsCount = new AtomicInteger(0);
    for (int i = 0; i < NUM_TABLES; ++i) {
        TableName tableName = TableName.valueOf(String.format("testtb-%04d", i));
        for (int j = 1; j < NUM_ITEMS; ++j) {
            procSet.addBack(new TestTableProcedure(i * 100 + j, tableName,
                    TableProcedureInterface.TableOperationType.EDIT));
            opsCount.incrementAndGet();
        }
    }
    assertEquals(opsCount.get(), queue.size());

    final Thread[] threads = new Thread[NUM_TABLES * 2];
    final HashSet<TableName> concurrentTables = new HashSet<TableName>();
    final ArrayList<String> failures = new ArrayList<String>();
    final AtomicInteger concurrentCount = new AtomicInteger(0);
    for (int i = 0; i < threads.length; ++i) {
        threads[i] = new Thread() {
            @Override
            public void run() {
                while (opsCount.get() > 0) {
                    try {
                        Procedure proc = procSet.acquire();
                        if (proc == null) {
                            queue.signalAll();
                            if (opsCount.get() > 0) {
                                continue;
                            }
                            break;
                        }

                        TableName tableId = procSet.getTableName(proc);
                        synchronized (concurrentTables) {
                            assertTrue("unexpected concurrency on " + tableId, concurrentTables.add(tableId));
                        }
                        assertTrue(opsCount.decrementAndGet() >= 0);
                        try {
                            long procId = proc.getProcId();
                            int concurrent = concurrentCount.incrementAndGet();
                            assertTrue("inc-concurrent=" + concurrent + " 1 <= concurrent <= " + NUM_TABLES,
                                    concurrent >= 1 && concurrent <= NUM_TABLES);
                            LOG.debug("[S] tableId=" + tableId + " procId=" + procId + " concurrent="
                                    + concurrent);
                            Thread.sleep(2000);
                            concurrent = concurrentCount.decrementAndGet();
                            LOG.debug("[E] tableId=" + tableId + " procId=" + procId + " concurrent="
                                    + concurrent);
                            assertTrue("dec-concurrent=" + concurrent, concurrent < NUM_TABLES);
                        } finally {
                            synchronized (concurrentTables) {
                                assertTrue(concurrentTables.remove(tableId));
                            }
                            procSet.release(proc);
                        }
                    } catch (Throwable e) {
                        LOG.error("Failed " + e.getMessage(), e);
                        synchronized (failures) {
                            failures.add(e.getMessage());
                        }
                    } finally {
                        queue.signalAll();
                    }
                }
            }
        };
        threads[i].start();
    }
    for (int i = 0; i < threads.length; ++i) {
        threads[i].join();
    }
    assertTrue(failures.toString(), failures.isEmpty());
    assertEquals(0, opsCount.get());
    assertEquals(0, queue.size());

    for (int i = 1; i <= NUM_TABLES; ++i) {
        final TableName table = TableName.valueOf(String.format("testtb-%04d", i));
        final TestTableProcedure dummyProc = new TestTableProcedure(100, table,
                TableProcedureInterface.TableOperationType.DELETE);
        assertTrue("queue should be deleted, table=" + table, queue.markTableAsDeleted(table, dummyProc));
    }
}
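
This last test uses a shared HashSet as a lightweight mutual-exclusion detector: add(tableId) returning false would mean another thread is already working on the same table, and remove(tableId) in the finally block releases the claim. Since HashSet itself is not thread-safe, every access is wrapped in a synchronized block, as in the test. A condensed, hypothetical sketch of that guard pattern:

import java.util.HashSet;

public class ConcurrencyGuard {
    private final HashSet<String> inUse = new HashSet<>();

    // returns true if the caller acquired exclusive access to the resource
    public synchronized boolean claim(String resource) {
        return inUse.add(resource); // false => some other caller already holds it
    }

    public synchronized void release(String resource) {
        if (!inUse.remove(resource)) {
            throw new IllegalStateException("releasing a resource that was never claimed: " + resource);
        }
    }

    public static void main(String[] args) {
        ConcurrencyGuard guard = new ConcurrencyGuard();
        System.out.println(guard.claim("testtb-0001")); // true: first claim succeeds
        System.out.println(guard.claim("testtb-0001")); // false: already claimed
        guard.release("testtb-0001");
        System.out.println(guard.claim("testtb-0001")); // true again after release
    }
}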