Example usage for java.util Collection clear

List of usage examples for java.util Collection clear

Introduction

On this page you can find usage examples for java.util.Collection.clear().

Prototype

void clear();

Document

Removes all of the elements from this collection (optional operation).
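
A minimal sketch of this contract: clear() empties a modifiable collection, while an unmodifiable view rejects the call with UnsupportedOperationException. The class and variable names below are illustrative.

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

public class ClearExample {
    public static void main(String[] args) {
        Collection<String> names = new ArrayList<>(List.of("alice", "bob"));
        names.clear();                       // removes every element
        System.out.println(names.isEmpty()); // true

        // clear() is an optional operation: unmodifiable views reject it
        Collection<String> readOnly = Collections.unmodifiableList(List.of("x"));
        try {
            readOnly.clear();
        } catch (UnsupportedOperationException expected) {
            System.out.println("clear() not supported on this view");
        }
    }
}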

Usage

From source file:de.hybris.platform.test.MediaTest.java

@Test
public void testPermittedPrincipals() throws JaloBusinessException {
    final UserRight userRight = AccessManager.getInstance().getOrCreateUserRightByCode("read");
    final Media m = MediaManager.getInstance().createMedia("media123");
    final Collection<Principal> existingGrantedPrincipals = m.getPermittedPrincipals();
    assertThat(existingGrantedPrincipals).isEmpty();

    final Collection<Principal> principalsToBePermitted = new HashSet<Principal>();
    final User testUser1 = UserManager.getInstance().createUser("testUser1");
    final User testUser2 = UserManager.getInstance().createUser("testUser2");
    principalsToBePermitted.add(testUser1);
    principalsToBePermitted.add(testUser2);

    m.setPermittedPrincipals(principalsToBePermitted);
    assertCollection(principalsToBePermitted, m.getPermittedPrincipals());

    //check the positive permissions
    assertTrue(m.getPermissions(testUser1, false).contains(userRight.getPK()));
    assertTrue(m.getPermissions(testUser2, false).contains(userRight.getPK()));

    principalsToBePermitted.clear();
    final User testUser3 = UserManager.getInstance().createUser("testUser3");
    principalsToBePermitted.add(testUser3);
    m.setPermittedPrincipals(principalsToBePermitted);
    assertCollection(principalsToBePermitted, m.getPermittedPrincipals());
    assertTrue(m.getPermissions(testUser3, false).contains(userRight.getPK()));
}

From source file:com.github.tteofili.looseen.yay.SGM.java

static double evaluate(SGM network) throws Exception {
    double cc = 0;
    double wc = 0;
    int window = network.configuration.window;
    List<String> vocabulary = network.getVocabulary();
    Collection<Integer> exps = new LinkedList<>();
    Collection<Integer> acts = new LinkedList<>();
    for (Sample sample : network.samples) {
        double[] inputs = sample.getInputs();
        int j = 0;
        for (int i = 0; i < window - 1; i++) {
            int le = inputs.length;
            int actualMax = getMaxIndex(network.predictOutput(inputs), j, j + le - 1);
            int expectedMax = getMaxIndex(sample.getOutputs(), j, j + le - 1);
            exps.add(expectedMax % le);
            acts.add(actualMax % le);
            j += le;
        }
        boolean c = true;
        for (Integer e : exps) {
            c &= acts.remove(e);
        }
        if (c) {
            cc++;
            String x = vocabulary.get(getMaxIndex(inputs, 0, inputs.length));
            StringBuilder y = new StringBuilder();
            for (int e : exps) {
                if (y.length() > 0) {
                    y.append(" ");
                }
                y.append(vocabulary.get(e));
            }
            System.out.println("matched : " + x + " -> " + y);
        } else {
            wc++;
        }
        acts.clear();
        exps.clear();
        if (cc + wc > 2000)
            break;
    }
    return (cc / (wc + cc));
}

From source file:com.wingnest.play2.origami.plugin.OrigamiPlugin.java

private void registerGraphClasses() {
    final OGraphDatabase db = GraphDB.open();
    try {
        debug("Registering Graph Classes");

        final Set<Class<GraphVertexModel>> vertexClasses = new HashSet<Class<GraphVertexModel>>();
        final Set<Class<GraphEdgeModel>> edgeClasses = new HashSet<Class<GraphEdgeModel>>();

        for (String pkg : models) {
            vertexClasses.addAll(getSubTypesOf(pkg, GraphVertexModel.class));
            edgeClasses.addAll(getSubTypesOf(pkg, GraphEdgeModel.class));
        }

        @SuppressWarnings("unchecked")
        final Collection<Class<?>> javaClasses = CollectionUtils.union(vertexClasses, edgeClasses);

        final Class<?>[] javaClassArray = javaClasses.toArray(new Class<?>[0]);
        Arrays.sort(javaClassArray, new Comparator<Class<?>>() {
            @Override
            public int compare(Class<?> o1, Class<?> o2) {
                if (o1.equals(o2))
                    return 0;
                if (o1.isAssignableFrom(o2))
                    return -1;
                if (o2.isAssignableFrom(o1))
                    return 1;
                int o1cnt = calSuperclassCount(o1);
                int o2cnt = calSuperclassCount(o2);
                return (o1cnt - o2cnt);
            }
        });

        javaClasses.clear();
        javaClasses.addAll(Arrays.asList(javaClassArray));

        final OSchema schema = db.getMetadata().getSchema();
        for (final Class<?> javaClass : javaClasses) {
            final String entityName = javaClass.getSimpleName();
            final OClass oClass;
            if (GraphVertexModel.class.isAssignableFrom(javaClass)) {
                final String className = javaClass.getSimpleName();
                debug("Entity: %s", className);
                if (schema.existsClass(className)) {
                    oClass = schema.getClass(className);
                } else {
                    oClass = db.createVertexType(className);
                }
                graphEntityMap.put(className, javaClass);
                final Class<?> sclass = javaClass.getSuperclass();
                if (javaClasses.contains(sclass)) {
                    final OClass sClass = db.getMetadata().getSchema().getClass(sclass.getSimpleName());
                    db.getMetadata().getSchema().getClass(entityName).setSuperClass(sClass);
                }
            } else if (GraphEdgeModel.class.isAssignableFrom(javaClass)) {
                final String className = javaClass.getSimpleName();
                debug("Entity: %s", className);
                if (schema.existsClass(className)) {
                    oClass = schema.getClass(className);
                } else {
                    oClass = db.createEdgeType(className);
                }
                graphEntityMap.put(className, javaClass);
                final Class<?> sclass = javaClass.getSuperclass();
                if (javaClasses.contains(sclass)) {
                    final OClass sClass = db.getMetadata().getSchema().getClass(sclass.getSimpleName());
                    db.getMetadata().getSchema().getClass(entityName).setSuperClass(sClass);
                }
            } else {
                throw new IllegalStateException("bug!?");
            }
            maintainProperties(oClass, javaClass);
        }
        debug("Registering Database Listeners");
        for (final Class<? extends ODatabaseListener> listener : getSubTypesOf("listeners",
                ODatabaseListener.class)) {
            debug("Listener: %s", listener.getName());
            GraphDB.getListeners().add(listener);
        }
        debug("Registering Record Hooks");
        for (final Class<? extends ORecordHook> hook : getSubTypesOf("hooks", ORecordHook.class)) {
            debug("Hook: %s", hook.getName());
            GraphDB.getGraphHooks().add(hook);
        }
    } catch (Exception e) {
        throw new OrigamiUnexpectedException(e);
    } finally {
        db.close();
    }
}

From source file:org.apache.mahout.ga.watchmaker.cd.tool.CDInfosToolTest.java

@Test
public void testGatherInfos() throws Exception {
    int n = 1; // put a greater value when you search for some nasty bug
    for (int nloop = 0; nloop < n; nloop++) {
        int maxattr = 100; // max number of attributes
        int nbattrs = rng.nextInt(maxattr) + 1;

        // random descriptors
        double numRate = rng.nextDouble();
        double catRate = rng.nextDouble() * (1.0 - numRate);
        Descriptors descriptors = randomDescriptors(nbattrs, numRate, catRate);

        // random descriptions
        Object[][] descriptions = randomDescriptions(descriptors);

        // random dataset
        Path inpath = getTestTempDirPath("input");
        Path output = getTestTempDirPath("output");
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(inpath.toUri(), conf);
        HadoopUtil.delete(conf, inpath);

        randomDataset(fs, inpath, descriptors, descriptions);

        // Start the tool
        List<String> result = Lists.newArrayList();
        fs.delete(output, true); // It's unhappy if this directory exists
        CDInfosTool.gatherInfos(descriptors, inpath, output, result);

        // check the results
        Collection<String> target = Lists.newArrayList();

        assertEquals(nbNonIgnored(descriptors), result.size());
        int rindex = 0;
        for (int index = 0; index < nbattrs; index++) {
            if (descriptors.isIgnored(index)) {
                continue;
            }

            String description = result.get(rindex++);

            if (descriptors.isNumerical(index)) {
                // numerical attribute
                double min = (Double) descriptions[index][0];
                double max = (Double) descriptions[index][1];
                double[] range = DescriptionUtils.extractNumericalRange(description);

                assertTrue("bad min value for attribute (" + index + ')', min <= range[0]);
                assertTrue("bad max value for attribute (" + index + ')', max >= range[1]);
            } else if (descriptors.isNominal(index)) {
                // categorical attribute
                Object[] values = descriptions[index];
                target.clear();
                DescriptionUtils.extractNominalValues(description, target);

                assertEquals(values.length, target.size());
                assertTrue(target.containsAll(Arrays.asList(values)));
            }
        }
    }
}

From source file:org.datavec.cli.transforms.text.nlp.TfidfTextVectorizerTransform.java

/**
 * Transform the raw record with stats we've learned from the first pass
 *
 * Schema:
 *
 *       Writable[0]: go dogs, go 1
 *       Writable[1]: label_A
 *
 * 1. take the raw record, split off the label and look up its ID from the cache
 * 2. take the remaining part of the raw record and run it through the TF-IDF conversion
 *
 */
@Override
public void transform(Collection<Writable> vector) {

    String label;
    Integer labelID;

    String textRecord;

    // 1. extract label and sentence from vector

    if (vector.size() != 2) {
        // should throw an IllegalStateException here
        return;
    }

    textRecord = vector.toArray()[0].toString();
    label = vector.toArray()[1].toString();

    // 2. get the label ID

    labelID = this.getLabelID(label);

    // 3. get the converted vector
    //   System.out.print( "Label: " + label + " ");
    //   INDArray tfidfVector = this.convertSentenceToTFIDFVector( textRecord );
    INDArray tfidfVector = this.convertTextRecordToTFIDFVector(textRecord);

    //   System.out.println( "cols: " + tfidfVector.columns() );
    //   System.out.println( "rows: " + tfidfVector.rows() );

    // 4. rebuild the vector reference with the schema { vector entries, ..., label }

    // 4.a. clear out old entries
    vector.clear();

    // 4.b. rebuild

    for (int colID = 0; colID < tfidfVector.columns(); colID++) {

        vector.add(new DoubleWritable(tfidfVector.getDouble(0, colID)));

    }

    // we always append a label

    vector.add(new DoubleWritable(labelID));

}
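
The clear-and-rebuild step (4.a/4.b) above is a general pattern for rewriting a collection in place. A stand-alone sketch using plain JDK types, assuming the usual java.util imports; the values are illustrative stand-ins for the TF-IDF output:

// Sketch: drop the raw entries, then append the computed values and the label.
Collection<Double> vector = new ArrayList<>(Arrays.asList(0.1, 0.9)); // raw record stand-in
double[] tfidf = {0.3, 0.0, 0.7};                                     // computed feature values
int labelId = 2;

vector.clear();               // 4.a. remove the old entries
for (double v : tfidf) {
    vector.add(v);            // 4.b. rebuild with the feature values
}
vector.add((double) labelId); // the label is always appended last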

From source file:org.lockss.test.LockssTestCase.java

/** Assert that a collection cannot be modified, <i>ie</i>, that all of
 * the following methods, plus the collection's iterator().remove()
 * method, throw UnsupportedOperationException: add(), addAll(), clear(),
 * remove(), removeAll(), retainAll() */

public static void assertUnmodifiable(Collection coll) {
    List list = ListUtil.list("bar");
    try {
        coll.add("foo");
        fail("add() didn't throw");
    } catch (UnsupportedOperationException e) {
    }
    try {
        coll.addAll(list);
        fail("addAll() didn't throw");
    } catch (UnsupportedOperationException e) {
    }
    try {
        coll.clear();
        fail("clear() didn't throw");
    } catch (UnsupportedOperationException e) {
    }
    try {
        coll.remove("foo");
        fail("remove() didn't throw");
    } catch (UnsupportedOperationException e) {
    }
    try {
        coll.removeAll(list);
        fail("removeAll() didn't throw");
    } catch (UnsupportedOperationException e) {
    }
    try {
        coll.retainAll(list);
        fail("retainAll() didn't throw");
    } catch (UnsupportedOperationException e) {
    }
    Iterator iter = coll.iterator();
    iter.next();
    try {
        iter.remove();
        fail("iterator().remove() didn't throw");
    } catch (UnsupportedOperationException e) {
    }
}
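
A hypothetical call site for this helper, assuming a test method inside a LockssTestCase subclass and the usual java.util imports; the backing list and its contents are illustrative:

public void testViewRejectsMutation() {
    List<String> backing = new ArrayList<>(Arrays.asList("foo", "bar"));
    // Every mutator, including clear() and iterator().remove(), must throw
    // UnsupportedOperationException on the unmodifiable view.
    assertUnmodifiable(Collections.unmodifiableList(backing));
}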

From source file:ca.on.oicr.pde.workflows.GATK3Workflow.java

@Override
public void buildWorkflow() {

    final String binDir = this.getWorkflowBaseDir() + "/bin/";
    final Boolean manualOutput = BooleanUtils.toBoolean(getProperty("manual_output"), "true", "false");
    final String queue = getOptionalProperty("queue", "");
    final String perl = getProperty("perl");
    final String java = getProperty("java");
    final String tabixDir = getProperty("tabix_dir");
    final String gatk = getOptionalProperty("gatk_jar", binDir);
    final String mergeVCFScript = binDir + "sw_module_merge_GATK_VCF.pl";
    final String gatkKey = getProperty("gatk_key");
    final String identifier = getProperty("identifier");
    final String refFasta = getProperty("ref_fasta");
    final String dbsnpVcf = getProperty("gatk_dbsnp_vcf");
    final Integer intervalPadding = hasPropertyAndNotNull("interval_padding")
            ? Integer.parseInt(getProperty("interval_padding"))
            : null;
    final String standCallConf = getProperty("stand_call_conf");
    final String standEmitConf = getProperty("stand_emit_conf");
    final Integer downsamplingCoverage = hasPropertyAndNotNull("downsampling_coverage")
            ? Integer.parseInt(getProperty("downsampling_coverage"))
            : null;
    final String downsamplingType = getOptionalProperty("downsampling_type", null);
    final Integer gatkHaplotypeCallerThreads = Integer.parseInt(getProperty("gatk_haplotype_caller_threads"));
    final Integer gatkHaplotypeCallerXmx = Integer.parseInt(getProperty("gatk_haplotype_caller_xmx"));
    final Integer gatkUnifiedGenotyperXmx = Integer.parseInt(getProperty("gatk_unified_genotyper_xmx"));
    final Integer gatkUnifiedGenotyperThreads = Integer.parseInt(getProperty("gatk_unified_genotyper_threads"));
    final Integer gatkOverhead = Integer.parseInt(getProperty("gatk_sched_overhead_mem"));
    final String haplotypeCallerParams = getOptionalProperty("gatk_haplotype_caller_params", null);
    final String unifiedGenotyperParams = getOptionalProperty("gatk_unified_genotyper_params", null);

    final List<String> intervalFilesList = Arrays
            .asList(StringUtils.split(getOptionalProperty("interval_files", ""), ","));
    final Set<String> intervalFiles = new HashSet<>(intervalFilesList);
    if (intervalFiles.size() != intervalFilesList.size()) {
        throw new RuntimeException("Duplicate interval_files detected");
    }

    final Set<String> chrSizes;
    if (hasProperty("chr_sizes")) {
        //chr_sizes has been set
        List<String> chrSizesList = Arrays.asList(StringUtils.split(getProperty("chr_sizes"), ","));
        chrSizes = new HashSet<>(chrSizesList);
        if (chrSizes.size() != chrSizesList.size()) {
            throw new RuntimeException("Duplicate chr_sizes detected.");
        }
    } else if (!intervalFiles.isEmpty()) {
        //chr_sizes not set, interval_files has been set - use interval files to calculate chrSizes
        try {
            chrSizes = BEDFileUtils.getChromosomes(intervalFiles);
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
    } else {
        //chr_sizes and interval_files not set - can not calculate chrSizes
        chrSizes = new HashSet<>();
    }

    // one chrSize record is required, null will result in no parallelization
    if (chrSizes.isEmpty()) {
        chrSizes.add(null);
    }

    Multimap<VariantCaller, Pair<String, Job>> snvFiles = HashMultimap.create();
    Multimap<VariantCaller, Pair<String, Job>> indelFiles = HashMultimap.create();
    Multimap<VariantCaller, Pair<String, Job>> finalFiles = HashMultimap.create();
    for (String chrSize : chrSizes) {
        for (VariantCaller vc : variantCallers) {
            String workingDir = DATADIR + vc.toString() + "/";
            switch (vc) {
            case HAPLOTYPE_CALLER:
                //GATK Haplotype Caller ( https://www.broadinstitute.org/gatk/gatkdocs/org_broadinstitute_gatk_tools_walkers_haplotypecaller_HaplotypeCaller.php )
                HaplotypeCaller haplotypeCallerCommand = new HaplotypeCaller.Builder(java,
                        Integer.toString(gatkHaplotypeCallerXmx) + "g", TMPDIR, gatk, gatkKey, DATADIR)
                                .setInputBamFiles(inputBamFiles).setReferenceSequence(refFasta)
                                .setDbsnpFilePath(dbsnpVcf).setStandardCallConfidence(standCallConf)
                                .setStandardEmitConfidence(standEmitConf)
                                .setGenotypingMode(
                                        getOptionalProperty("haplotype_caller_genotyping_mode", null))
                                .setOutputMode(getOptionalProperty("haplotype_caller_output_mode", null))
                                .setOperatingMode(HaplotypeCaller.OperatingMode.VCF).addInterval(chrSize)
                                .addIntervalFiles(intervalFiles).setIntervalPadding(intervalPadding)
                                .setDownsamplingCoverageThreshold(downsamplingCoverage)
                                .setDownsamplingType(downsamplingType)
                                .setOutputFileName(
                                        "gatk" + (chrSize != null ? "." + chrSize.replace(":", "-") : ""))
                                .setNumCpuThreadsPerDataThread(gatkHaplotypeCallerThreads)
                                .setExtraParameters(haplotypeCallerParams).build();
                Job haplotypeCallerJob = this.getWorkflow().createBashJob("GATKHaplotypeCaller")
                        .setMaxMemory(Integer.toString((gatkHaplotypeCallerXmx + gatkOverhead) * 1024))
                        .setQueue(queue);
                haplotypeCallerJob.getCommand().setArguments(haplotypeCallerCommand.getCommand());

                finalFiles.put(vc, Pair.of(haplotypeCallerCommand.getOutputFile(), haplotypeCallerJob));
                break;

            case UNIFIED_GENOTYPER:
                //GATK Unified Genotyper (INDELS) ( https://www.broadinstitute.org/gatk/guide/tooldocs/org_broadinstitute_gatk_tools_walkers_genotyper_UnifiedGenotyper.php )
                UnifiedGenotyper indelsUnifiedGenotyperCommand = new UnifiedGenotyper.Builder(java,
                        Integer.toString(gatkUnifiedGenotyperXmx) + "g", TMPDIR, gatk, gatkKey, workingDir)
                                .setInputBamFiles(inputBamFiles).setReferenceSequence(refFasta)
                                .setDbsnpFilePath(dbsnpVcf).setStandardCallConfidence(standCallConf)
                                .setStandardEmitConfidence(standEmitConf).setGenotypeLikelihoodsModel("INDEL")
                                .setGroup("Standard").addInterval(chrSize).addIntervalFiles(intervalFiles)
                                .setIntervalPadding(intervalPadding)
                                .setDownsamplingCoverageThreshold(downsamplingCoverage)
                                .setDownsamplingType(downsamplingType)
                                .setOutputFileName(
                                        "gatk" + (chrSize != null ? "." + chrSize.replace(":", "-") : ""))
                                .setNumCpuThreadsPerDataThread(gatkUnifiedGenotyperThreads)
                                .setExtraParameters(unifiedGenotyperParams).build();
                Job indelsUnifiedGenotyperJob = this.getWorkflow().createBashJob("GATKUnifiedGenotyperIndel")
                        .setMaxMemory(Integer.toString((gatkUnifiedGenotyperXmx + gatkOverhead) * 1024))
                        .setQueue(queue);
                indelsUnifiedGenotyperJob.getCommand().setArguments(indelsUnifiedGenotyperCommand.getCommand());

                indelFiles.put(vc,
                        Pair.of(indelsUnifiedGenotyperCommand.getOutputFile(), indelsUnifiedGenotyperJob));

                //GATK Unified Genotyper (SNVS) ( https://www.broadinstitute.org/gatk/gatkdocs/org_broadinstitute_gatk_tools_walkers_genotyper_UnifiedGenotyper.php )
                UnifiedGenotyper snvsUnifiedGenotyperCommand = new UnifiedGenotyper.Builder(java,
                        Integer.toString(gatkUnifiedGenotyperXmx) + "g", TMPDIR, gatk, gatkKey, workingDir)
                                .setInputBamFiles(inputBamFiles).setReferenceSequence(refFasta)
                                .setDbsnpFilePath(dbsnpVcf).setStandardCallConfidence(standCallConf)
                                .setStandardEmitConfidence(standEmitConf).setGenotypeLikelihoodsModel("SNP")
                                .addInterval(chrSize).addIntervalFiles(intervalFiles)
                                .setIntervalPadding(intervalPadding)
                                .setDownsamplingCoverageThreshold(downsamplingCoverage)
                                .setDownsamplingType(downsamplingType)
                                .setOutputFileName(
                                        "gatk" + (chrSize != null ? "." + chrSize.replace(":", "-") : ""))
                                .setNumCpuThreadsPerDataThread(gatkUnifiedGenotyperThreads)
                                .setExtraParameters(unifiedGenotyperParams).build();
                Job snvsUnifiedGenotyperJob = this.getWorkflow().createBashJob("GATKUnifiedGenotyperSNV")
                        .setMaxMemory(Integer.toString((gatkUnifiedGenotyperXmx + gatkOverhead) * 1024))
                        .setQueue(queue);
                snvsUnifiedGenotyperJob.getCommand().setArguments(snvsUnifiedGenotyperCommand.getCommand());

                snvFiles.put(vc, Pair.of(snvsUnifiedGenotyperCommand.getOutputFile(), snvsUnifiedGenotyperJob));
                break;

            default:
                throw new RuntimeException("Unsupported mode: " + variantCallers.toString());
            }
        }
    }

    for (VariantCaller vc : variantCallers) {
        Collection<Pair<String, Job>> snvs = snvFiles.get(vc);
        Collection<Pair<String, Job>> indels = indelFiles.get(vc);
        Collection<Pair<String, Job>> all = finalFiles.get(vc);
        String workingDir = DATADIR + vc.toString() + "/";

        if (!snvs.isEmpty()) {
            MergeVcf mergeSnvsCommand = new MergeVcf.Builder(perl, mergeVCFScript, workingDir)
                    .addInputFiles(getLeftCollection(snvs)).build();
            Job mergeSnvsJob = this.getWorkflow().createBashJob("MergeRawSNVs").setMaxMemory("4096")
                    .setQueue(queue);
            mergeSnvsJob.getParents().addAll(getRightCollection(snvs));
            mergeSnvsJob.getCommand().setArguments(mergeSnvsCommand.getCommand());

            snvs.clear();
            snvs.add(Pair.of(mergeSnvsCommand.getOutputFile(), mergeSnvsJob));
        }

        if (!indels.isEmpty()) {
            MergeVcf mergeIndelsCommand = new MergeVcf.Builder(perl, mergeVCFScript, workingDir)
                    .addInputFiles(getLeftCollection(indels)).build();
            Job mergeIndelsJob = this.getWorkflow().createBashJob("MergeRawIndels").setMaxMemory("4096")
                    .setQueue(queue);
            mergeIndelsJob.getParents().addAll(getRightCollection(indels));
            mergeIndelsJob.getCommand().setArguments(mergeIndelsCommand.getCommand());

            indels.clear();
            indels.add(Pair.of(mergeIndelsCommand.getOutputFile(), mergeIndelsJob));
        }

        if (!snvs.isEmpty() && !indels.isEmpty() && all.isEmpty()) {
            MergeVcf mergeFinalCommand = new MergeVcf.Builder(perl, mergeVCFScript, workingDir)
                    .addInputFiles(getLeftCollection(snvs)).addInputFiles(getLeftCollection(indels)).build();
            Job mergeFinalJob = this.getWorkflow().createBashJob("MergeFinal").setMaxMemory("4096")
                    .setQueue(queue);
            mergeFinalJob.getParents().addAll(getRightCollection(snvs));
            mergeFinalJob.getParents().addAll(getRightCollection(indels));
            mergeFinalJob.getCommand().setArguments(mergeFinalCommand.getCommand());

            all.add(Pair.of(mergeFinalCommand.getOutputFile(), mergeFinalJob));
        } else if (snvs.isEmpty() && indels.isEmpty() && !all.isEmpty()) {
            if (all.size() > 1) {
                MergeVcf mergeFinalCommand = new MergeVcf.Builder(perl, mergeVCFScript, workingDir)
                        .addInputFiles(getLeftCollection(all)).build();
                Job mergeFinalJob = this.getWorkflow().createBashJob("MergeFinal").setMaxMemory("4096")
                        .setQueue(queue);
                mergeFinalJob.getParents().addAll(getRightCollection(all));
                mergeFinalJob.getCommand().setArguments(mergeFinalCommand.getCommand());

                all.clear();
                all.add(Pair.of(mergeFinalCommand.getOutputFile(), mergeFinalJob));
            } else {
                //there is one vcf, no need to merge
            }
        } else {
            throw new RuntimeException(
                    String.format("Unexpected state: snvs file = [%s], indels size = [%s], final size = [%s]",
                            snvs.size(), indels.size(), all.size()));
        }

        //Sort and compress the final vcf
        SortVcf sortVcfCommand = new SortVcf.Builder(workingDir)
                .setInputFile(Iterables.getOnlyElement(getLeftCollection(all)))
                .setOutputFileName(identifier + "." + StringUtils.lowerCase(vc.toString()) + ".raw").build();
        CompressAndIndexVcf compressIndexVcfCommand = new CompressAndIndexVcf.Builder(tabixDir, workingDir)
                .setInputFile(sortVcfCommand.getOutputFile()).build();
        List<String> cmd = new LinkedList<>();
        cmd.addAll(sortVcfCommand.getCommand());
        cmd.add("&&");
        cmd.addAll(compressIndexVcfCommand.getCommand());
        Job sortCompressIndexVcfJob = getWorkflow().createBashJob("SortCompressIndexVcf")
                .setMaxMemory(Integer.toString(4096)).setQueue(queue)
                .addParent(Iterables.getOnlyElement(getRightCollection(all)));
        sortCompressIndexVcfJob.getCommand().setArguments(cmd);

        //final output file
        SqwFile vcf = createOutputFile(compressIndexVcfCommand.getOutputVcfFile(), "application/vcf-4-gzip",
                manualOutput);
        SqwFile tbi = createOutputFile(compressIndexVcfCommand.getOutputTabixFile(), "application/tbi",
                manualOutput);
        vcf.getAnnotations().put(ANNOTKEY, vc.toString());
        tbi.getAnnotations().put(ANNOTKEY, vc.toString());
        sortCompressIndexVcfJob.addFile(vcf);
        sortCompressIndexVcfJob.addFile(tbi);
    }
}

From source file:com.facebook.model.GraphObjectFactoryTests.java

@SmallTest
@MediumTest
@LargeTest
public void testCollectionClearThrows() throws JSONException {
    try {
        Collection<Integer> collection = GraphObject.Factory.createList(Integer.class);
        collection.clear();
        fail("Expected exception");
    } catch (UnsupportedOperationException exception) {
    }
}

From source file:org.itest.impl.ITestRandomObjectGeneratorImpl.java

protected Object fillCollection(Object o, Type type, Map<String, Type> map, ITestContext iTestContext) {
    ITestParamState iTestState = iTestContext.getCurrentParam();
    Collection<Object> col = (Collection<Object>) o;
    Class collectionClass;
    if (null != iTestContext.getCurrentParam()
            && null != iTestContext.getCurrentParam().getAttribute(ITestConstants.ATTRIBUTE_CLASS)) {
        collectionClass = iTestConfig.getITestValueConverter().convert(Class.class,
                iTestContext.getCurrentParam().getAttribute(ITestConstants.ATTRIBUTE_CLASS));
    } else {
        collectionClass = ITestTypeUtil.getRawClass(type);
    }
    if (!collectionClass.isInterface()) {
        col = (Collection<Object>) newInstance(collectionClass, iTestContext);
    } else {
        if (Set.class.isAssignableFrom(collectionClass)) {
            col = new HashSet<Object>();
        } else {
            col = new ArrayList<Object>();
        }
    }
    if (null != col) {
        col.clear();
    }
    int size = random.nextInt(RANDOM_MAX - RANDOM_MIN) + RANDOM_MIN;
    if (null != iTestState && iTestState.getSizeParam() != null) {
        size = iTestState.getSizeParam();
    }

    Type elementType = ITestTypeUtil
            .getTypeProxy(ITestTypeUtil.getParameterType(type, Collection.class, 0, map), map);
    for (int i = 0; i < size; i++) {
        iTestContext.enter(col, String.valueOf(i));
        Object value;
        value = generateRandom(elementType, map, iTestContext);
        col.add(value);
        iTestContext.leave(value);
    }
    return col;
}

From source file:com.btoddb.fastpersitentqueue.FpqIT.java

@Test
public void testThreading() throws Exception {
    final int numEntries = 1000;
    final int numPushers = 4;
    final int numPoppers = 4;
    final int entrySize = 1000;
    fpq1.setMaxTransactionSize(2000);
    final int popBatchSize = 100;
    fpq1.setMaxMemorySegmentSizeInBytes(10000000);
    fpq1.setMaxJournalFileSize(10000000);
    fpq1.setMaxJournalDurationInMs(30000);
    fpq1.setFlushPeriodInMs(1000);
    fpq1.setNumberOfFlushWorkers(4);

    final Random pushRand = new Random(1000L);
    final Random popRand = new Random(1000000L);
    final AtomicInteger pusherFinishCount = new AtomicInteger();
    final AtomicInteger numPops = new AtomicInteger();
    final AtomicLong counter = new AtomicLong();
    final AtomicLong pushSum = new AtomicLong();
    final AtomicLong popSum = new AtomicLong();

    fpq1.init();

    ExecutorService execSrvc = Executors.newFixedThreadPool(numPushers + numPoppers);

    Set<Future> futures = new HashSet<Future>();

    // start pushing
    for (int i = 0; i < numPushers; i++) {
        Future future = execSrvc.submit(new Runnable() {
            @Override
            public void run() {
                for (int i = 0; i < numEntries; i++) {
                    try {
                        long x = counter.getAndIncrement();
                        pushSum.addAndGet(x);
                        ByteBuffer bb = ByteBuffer.wrap(new byte[entrySize]);
                        bb.putLong(x);

                        fpq1.beginTransaction();
                        fpq1.push(bb.array());
                        fpq1.commit();
                        if ((x + 1) % 500 == 0) {
                            System.out.println("pushed ID = " + x);
                        }
                        Thread.sleep(pushRand.nextInt(5));
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
                pusherFinishCount.incrementAndGet();
            }
        });
        futures.add(future);
    }

    // start popping
    for (int i = 0; i < numPoppers; i++) {
        Future future = execSrvc.submit(new Runnable() {
            @Override
            public void run() {
                while (pusherFinishCount.get() < numPushers || !fpq1.isEmpty()) {
                    try {
                        fpq1.beginTransaction();
                        try {
                            Collection<FpqEntry> entries = fpq1.pop(popBatchSize);
                            if (null == entries) {
                                Thread.sleep(100);
                                continue;
                            }

                            for (FpqEntry entry : entries) {
                                ByteBuffer bb = ByteBuffer.wrap(entry.getData());
                                popSum.addAndGet(bb.getLong());
                                if (entry.getId() % 500 == 0) {
                                    System.out.println("popped ID = " + entry.getId());
                                }
                            }
                            numPops.addAndGet(entries.size());
                            fpq1.commit();
                            entries.clear();
                        } finally {
                            if (fpq1.isTransactionActive()) {
                                fpq1.rollback();
                            }
                        }
                        Thread.sleep(popRand.nextInt(10));
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        });
        futures.add(future);
    }

    boolean finished = false;
    while (!finished) {
        try {
            for (Future f : futures) {
                f.get();
            }
            finished = true;
        } catch (InterruptedException e) {
            // ignore
            Thread.interrupted();
        }
    }

    assertThat(numPops.get(), is(numEntries * numPushers));
    assertThat(fpq1.getNumberOfEntries(), is(0L));
    assertThat(pushSum.get(), is(popSum.get()));
    assertThat(fpq1.getMemoryMgr().getNumberOfActiveSegments(), is(1));
    assertThat(fpq1.getMemoryMgr().getSegments(), hasSize(1));
    assertThat(fpq1.getJournalMgr().getJournalFiles().entrySet(), hasSize(1));
    assertThat(FileUtils.listFiles(fpq1.getPagingDirectory(), TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE),
            is(empty()));
    assertThat(
            FileUtils.listFiles(fpq1.getJournalDirectory(), TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE),
            hasSize(1));
}