Example usage for java.util HashSet toArray

List of usage examples for java.util HashSet toArray

Introduction

On this page you can find example usages of java.util HashSet.toArray.

Prototype

<T> T[] toArray(T[] a);

Source Link

Document

Returns an array containing all of the elements in this set; the runtime type of the returned array is that of the specified array.

Usage

From source file:edu.unc.lib.dl.services.BatchIngestTask.java

/**
 * Initializes the batch ingest task: resolves the batch directory layout, loads
 * the ingest properties, orders the FOXML files to ingest, and — when a previous
 * ingest log exists — restores state so the ingest resumes where it left off.
 *
 * @throws BatchFailedException if any part of initialization fails
 */
public void init() throws BatchFailedException {
    log.info("Ingest task created for " + baseDir.getAbsolutePath());
    try {
        dataDir = new File(this.getBaseDir(), "data");
        premisDir = new File(this.getBaseDir(), "premisEvents");
        ingestLog = new File(this.getBaseDir(), INGEST_LOG);
        ingestProperties = new IngestProperties(this.getBaseDir());
        foxmlFiles = this.getBaseDir().listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                return name.endsWith(".foxml");
            }
        });
        // process FOXML files in a deterministic, case-insensitive name order
        Arrays.sort(foxmlFiles, new Comparator<File>() {
            @Override
            public int compare(File o1, File o2) {
                return String.CASE_INSENSITIVE_ORDER.compare(o1.getName(), o2.getName());
            }
        });

        // collect the distinct destination containers for this batch
        HashSet<PID> cSet = new HashSet<PID>();
        for (ContainerPlacement p : ingestProperties.getContainerPlacements().values()) {
            cSet.add(p.parentPID);
        }
        containers = cSet.toArray(new PID[0]);
        Arrays.sort(containers);

        this.eventLogger = new PremisEventLogger(
                ContentModelHelper.Administrative_PID.REPOSITORY_MANAGEMENT_SOFTWARE.getPID().getURI());

        this.state = STATE.CHECK;
        if (ingestLog.exists()) { // this is a resume, find next foxml
            String lastLine = null;
            BufferedReader r = new BufferedReader(new FileReader(ingestLog));
            try {
                for (String line = r.readLine(); line != null; line = r.readLine()) {
                    lastLine = line;
                }
            } finally {
                r.close(); // fix: previously leaked when readLine() threw
            }
            if (lastLine != null) {
                // format is tab separated: <pid>\t<filename>\t<label>
                String[] l = lastLine.split("\\t");
                if (CONTAINER_UPDATED_CODE.equals(l[1])) {
                    this.state = STATE.CONTAINER_UPDATES;
                    this.lastIngestPID = new PID(l[0]);
                } else {
                    this.lastIngestFilename = l[1];
                    this.lastIngestPID = new PID(l[0]);
                    this.state = STATE.INGEST_WAIT;
                    log.info("Resuming ingest from " + this.lastIngestFilename + " in "
                            + this.getBaseDir().getName());
                }
            }
        }
        this.ingestLogWriter = new BufferedWriter(new FileWriter(ingestLog, true));
    } catch (Exception e) {
        throw fail("Cannot initialize the ingest task.", e);
    }
}

From source file:org.apache.sysml.hops.codegen.template.PlanSelectionFuseCostBased.java

/**
 * Selects fusion plans for all roots: partitions the memo table into connected
 * sub graphs, enumerates and selects plans per partition, then adds composite
 * multi-aggregate templates across partitions and keeps the distinct best plans.
 */
@Override
public void selectPlans(CPlanMemoTable memo, ArrayList<Hop> roots) {
    //step 1: determine connected sub graphs of plans
    Collection<HashSet<Long>> partitions = getConnectedSubGraphs(memo, roots);
    if (LOG.isTraceEnabled())
        LOG.trace("Connected sub graphs: " + partitions.size());

    //steps 2-4 run independently for every partition
    for (HashSet<Long> part : partitions) {
        //step 2: determine materialization points
        HashSet<Long> rootNodes = getPartitionRootNodes(memo, part);
        if (LOG.isTraceEnabled())
            LOG.trace("Partition root points: " + Arrays.toString(rootNodes.toArray(new Long[0])));
        ArrayList<Long> matPoints = getMaterializationPoints(rootNodes, part, memo);
        if (LOG.isTraceEnabled())
            LOG.trace("Partition materialization points: " + Arrays.toString(matPoints.toArray(new Long[0])));

        //step 3: create composite templates (within the partition)
        createAndAddMultiAggPlans(memo, part, rootNodes);

        //step 4: plan enumeration and plan selection
        selectPlans(memo, part, rootNodes, matPoints);
    }

    //step 5: add composite templates (across partitions)
    createAndAddMultiAggPlans(memo, roots);

    //take all distinct best plans
    for (Entry<Long, List<MemoTableEntry>> best : getBestPlans().entrySet())
        memo.setDistinct(best.getKey(), best.getValue());
}

From source file:org.lockss.config.TdbTitle.java

/**
 * Returns the complete list of unique ISSNs for this title, collected from the
 * print ISSN, the electronic ISSN, and the linking ISSN (nulls are skipped,
 * duplicates appear once).
 *
 * @return an array of unique ISSNs for this title
 */
public String[] getIssns() {
    HashSet<String> unique = new HashSet<String>();
    String[] candidates = { getPrintIssn(), getEissn(), getIssnL() };
    for (String candidate : candidates) {
        if (candidate != null) {
            unique.add(candidate);
        }
    }
    return unique.toArray(new String[unique.size()]);
}

From source file:org.mskcc.cbio.importer.config.internal.GDataImpl.java

/**
 * Collects the distinct datatypes to download for the given data source by
 * reflectively invoking each downloaded datatype's download-archives method.
 *
 * @param dataSourcesMetadata metadata identifying the data source
 * @return the distinct datatype names, in no particular order
 * @throws Exception if the reflective invocation fails
 */
@Override
public String[] getDatatypesToDownload(DataSourcesMetadata dataSourcesMetadata) throws Exception {

    HashSet<String> toReturn = new HashSet<String>();
    for (DatatypeMetadata datatypeMetadata : getDatatypeMetadata(Config.ALL)) {
        if (datatypeMetadata.isDownloaded()) {
            Method downloadArchivesMethod = datatypeMetadata
                    .getDownloadArchivesMethod(dataSourcesMetadata.getDataSource());
            // fix: invoke with no argument array instead of a bare null, which the
            // compiler treats as a null Object[] and flags as ambiguous varargs
            @SuppressWarnings("unchecked")
            Set<String> archives = (Set<String>) downloadArchivesMethod.invoke(datatypeMetadata);
            toReturn.addAll(archives);
        }
    }

    // outta here
    return toReturn.toArray(new String[0]);
}

From source file:org.strasa.middleware.manager.CreateFieldBookManagerImpl.java

/**
 * Generates the FACTOR section of the field book sheet: writes a styled header
 * row followed by one row per factor variable collected from the mandatory
 * factors (SITE/LOCATION/YEAR/SEASON) plus the header rows of every site's
 * genotype and layout workbooks.
 *
 * @param sheet       the field book sheet being written
 * @param lstSiteInfo the site information models whose genotype/layout files
 *                    contribute candidate factor names
 * @throws CreateFieldBookException if the field book cannot be built
 * @throws Exception                if reading a source workbook fails
 */
public void generateFactor(Sheet sheet, List<SiteInformationModel> lstSiteInfo)
        throws CreateFieldBookException, Exception {

    // mandatory factors present in every field book
    HashSet<String> lstSet = new HashSet<String>();
    lstSet.add("SITE");
    lstSet.add("LOCATION");
    lstSet.add("YEAR");
    lstSet.add("SEASON");

    // union in the header row (row 0) of each site's genotype and layout sheets
    for (SiteInformationModel siteInfo : lstSiteInfo) {
        Sheet shGenotype = getExcelSheet(siteInfo.getFileGenotype(), 0);
        Sheet shLayout = getExcelSheet(siteInfo.getFileLayout(), 0);
        lstSet.addAll(readParticularRowInExcelSheet(shGenotype, 0));
        lstSet.addAll(readParticularRowInExcelSheet(shLayout, 0));
    }

    // resolve the candidate names to known factor study variables
    List<StudyVariable> lstVar = new StudyVariableManagerImpl()
            .getFactorVariable(Arrays.asList(lstSet.toArray(new String[lstSet.size()])));

    // append below existing content, leaving one blank row
    int col = sheet.getLastRowNum() + 2;
    // header row; the trailing padded string appears to be a spacer column — TODO confirm
    writeRowFromList(
            new ArrayList<String>(Arrays.asList("FACTOR", "DESCRIPTION", "PROPERTY", "SCALE", "METHOD",
                    "DATATYPE", "       ")),
            sheet, col++,
            formatCell(IndexedColors.GREEN.getIndex(), IndexedColors.WHITE.getIndex(), (short) 200, true));
    // one unstyled row per resolved factor variable
    for (StudyVariable variable : lstVar) {
        writeRowFromList(new ArrayList<String>(
                Arrays.asList(variable.getVariablecode(), variable.getDescription(), variable.getProperty(),
                        variable.getScale(), variable.getMethod(), variable.getDatatype(), "    ")),
                sheet, col++, null);

    }
}

From source file:org.tellervo.desktop.bulkdataentry.command.PopulateFromODKCommand.java

/**
 * Writes a tab-separated CSV file containing one row per ODK parser and one
 * column for every field name seen across all parsers.
 *
 * @param parsers     the parsed ODK forms to export
 * @param csvfilename path of the file to create/overwrite
 * @throws IOException if the file cannot be created or written
 */
private void createCSVFile(ArrayList<ODKParser> parsers, String csvfilename) throws IOException {
    File file = new File(csvfilename);
    file.createNewFile();
    if (!file.canWrite())
        throw new IOException("Cannot write to file: " + file.getAbsolutePath());

    // union of every field name across all parsers
    HashSet<String> fieldNames = new HashSet<String>();
    for (ODKParser parser : parsers) {
        fieldNames.add(null); // placeholder removed below is not needed; see addAll
        fieldNames.remove(null);
        fieldNames.addAll(parser.getAllFields().keySet());
    }

    // fix: snapshot the ordering ONCE and reuse it for both the header and the
    // data columns (the old code called toArray() twice, coupling correctness
    // to the set's iteration order staying stable between calls)
    String[] fieldNamesArr = fieldNames.toArray(new String[fieldNames.size()]);

    String[][] table = new String[parsers.size()][fieldNamesArr.length];
    for (int r = 0; r < parsers.size(); r++) {
        ODKParser parser = parsers.get(r);
        for (int c = 0; c < fieldNamesArr.length; c++) {
            table[r][c] = parser.getFieldValueAsString(fieldNamesArr[c]);
        }
    }

    CSVWriter writer = new CSVWriter(new FileWriter(csvfilename), '\t');
    try {
        writer.writeNext(fieldNamesArr);
        for (int r = 0; r < table.length; r++) {
            writer.writeNext(table[r]);
        }
    } finally {
        writer.close(); // fix: previously leaked when a write threw
    }
}

From source file:net.countercraft.movecraft.async.translation.TranslationTask.java

/**
 * Deposits the drops of harvested blocks into the craft's chests and queues an
 * item-drop update for any stack that does not fit.
 *
 * @param blocksList      unused here; kept for interface compatibility
 * @param harvestedBlocks the blocks harvested during this translation
 */
private void captureYield(MovecraftLocation[] blocksList, List<MovecraftLocation> harvestedBlocks) {
    if (harvestedBlocks.isEmpty()) {
        return;
    }
    ArrayList<Inventory> chests = new ArrayList<Inventory>();
    HashSet<ItemDropUpdateCommand> itemDropUpdateSet = new HashSet<ItemDropUpdateCommand>();
    //find chests
    for (MovecraftLocation loc : getCraft().getBlockList()) {
        Block block = getCraft().getW().getBlockAt(loc.getX(), loc.getY(), loc.getZ());
        if (block.getType() == Material.CHEST || block.getType() == Material.TRAPPED_CHEST)
            chests.add(((InventoryHolder) (block.getState())).getInventory());
    }

    for (MovecraftLocation harvestedBlock : harvestedBlocks) {
        Block block = getCraft().getW().getBlockAt(harvestedBlock.getX(), harvestedBlock.getY(),
                harvestedBlock.getZ());
        ItemStack[] drops = block.getDrops().toArray(new ItemStack[block.getDrops().size()]);
        //generate seed drops
        if (block.getType() == Material.CROPS) {
            Random rand = new Random();
            int amount = rand.nextInt(4);
            if (amount > 0) {
                ItemStack seeds = new ItemStack(Material.SEEDS, amount);
                HashSet<ItemStack> d = new HashSet<ItemStack>(Arrays.asList(drops));
                d.add(seeds);
                drops = d.toArray(new ItemStack[d.size()]);
            }
        }
        //get contents of inventories before depositing
        if (block.getState() instanceof InventoryHolder) {
            if (block.getState() instanceof Chest) {
                HashSet<ItemStack> d = new HashSet<ItemStack>(
                        Arrays.asList(((Chest) block.getState()).getBlockInventory().getContents()));
                d.addAll(block.getDrops());
                drops = d.toArray(new ItemStack[d.size()]);
            } else {
                HashSet<ItemStack> d = new HashSet<ItemStack>(
                        Arrays.asList((((InventoryHolder) block.getState()).getInventory().getContents())));
                d.addAll(block.getDrops());
                drops = d.toArray(new ItemStack[d.size()]);
            }
        }
        for (ItemStack drop : drops) {
            ItemStack retStack = putInToChests(drop, chests);
            if (retStack != null)
                //drop items on position
                itemDropUpdateSet.add(new ItemDropUpdateCommand(new Location(getCraft().getW(),
                        harvestedBlock.getX(), harvestedBlock.getY(), harvestedBlock.getZ()), retStack));
        }
    }
    // fix: toArray(new T[1]) on an empty set returns a length-1 array whose only
    // element is null; a zero-length seed array sizes the result correctly
    data.setItemDropUpdates(itemDropUpdateSet.toArray(new ItemDropUpdateCommand[0]));
}

From source file:org.compass.core.lucene.engine.store.AbstractLuceneSearchEngineStore.java

/**
 * Configures the store: builds the sub-index/alias cross mappings from the root
 * resource mappings, then instantiates and configures any directory wrapper
 * providers declared in the settings.
 *
 * @param searchEngineFactory factory supplying the Lucene settings
 * @param settings            compass settings (directory wrapper groups, class loader)
 * @param mapping             compass mapping whose root mappings define the sub indexes
 */
public void configure(LuceneSearchEngineFactory searchEngineFactory, CompassSettings settings,
        CompassMapping mapping) {
    template = new LuceneStoreTemplate(this);

    this.luceneSettings = searchEngineFactory.getLuceneSettings();

    HashSet<String> subIndexesSet = new HashSet<String>();
    for (ResourceMapping resourceMapping : mapping.getRootMappings()) {
        String alias = resourceMapping.getAlias();
        String[] tempSubIndexes = resourceMapping.getSubIndexHash().getSubIndexes();
        for (String subIndex : tempSubIndexes) {
            subIndexesSet.add(subIndex.intern());

            // alias -> sub indexes
            List<String> list = subIndexesByAlias.get(alias);
            if (list == null) {
                list = new ArrayList<String>();
                subIndexesByAlias.put(alias, list);
            }
            list.add(subIndex);

            // sub index -> aliases
            // fix: test the value we already fetched instead of a redundant second
            // map lookup (now consistent with the subIndexesByAlias branch above)
            list = aliasesBySubIndex.get(subIndex);
            if (list == null) {
                list = new ArrayList<String>();
                aliasesBySubIndex.put(subIndex, list);
            }
            list.add(alias);
        }
    }
    subIndexes = subIndexesSet.toArray(new String[subIndexesSet.size()]);

    // set up directory wrapper providers
    Map<String, CompassSettings> dwSettingGroups = settings
            .getSettingGroups(LuceneEnvironment.DirectoryWrapper.PREFIX);
    if (dwSettingGroups.size() > 0) {
        ArrayList<DirectoryWrapperProvider> dws = new ArrayList<DirectoryWrapperProvider>();
        for (Map.Entry<String, CompassSettings> entry : dwSettingGroups.entrySet()) {
            String dwName = entry.getKey();
            if (log.isInfoEnabled()) {
                log.info("Building directory wrapper [" + dwName + "]");
            }
            CompassSettings dwSettings = entry.getValue();
            String dwType = dwSettings.getSetting(LuceneEnvironment.DirectoryWrapper.TYPE);
            if (dwType == null) {
                throw new ConfigurationException(
                        "Directory wrapper [" + dwName + "] has no type associated with it");
            }
            DirectoryWrapperProvider dw;
            try {
                dw = (DirectoryWrapperProvider) ClassUtils.forName(dwType, settings.getClassLoader())
                        .newInstance();
            } catch (Exception e) {
                throw new ConfigurationException("Failed to create directory wrapper [" + dwName + "]", e);
            }
            if (dw instanceof CompassConfigurable) {
                ((CompassConfigurable) dw).configure(dwSettings);
            }
            dws.add(dw);
        }
        directoryWrapperProviders = dws.toArray(new DirectoryWrapperProvider[dws.size()]);
    }

    this.localDirectoryCacheManager = new LocalDirectoryCacheManager(searchEngineFactory);
    localDirectoryCacheManager.configure(settings);
}

From source file:org.pentaho.di.job.entries.getpop.MailConnection.java

/**
 * Returns all subfolders of the specified folder
 *
 * @param folder/*  ww w.jav  a  2s . c o m*/
 *          parent folder
 * @return sub folders
 */
public String[] returnAllFolders(Folder folder) throws KettleException {
    HashSet<String> list = new HashSet<String>();
    list = returnSubfolders(folder);
    return list.toArray(new String[list.size()]);
}

From source file:org.apache.sysml.hops.codegen.opt.PlanSelectionFuseCostBased.java

/**
 * Creates and registers multi-aggregate (MAGG) template plans across partitions:
 * collects full aggregations over all roots, greedily merges compatible
 * candidates into groups, and adds a memo-table entry covering up to three
 * aggregations per group.
 *
 * @param memo  the memo table to read hop refs from and add MAGG entries to
 * @param roots the DAG roots to scan for full aggregations
 */
private void createAndAddMultiAggPlans(CPlanMemoTable memo, ArrayList<Hop> roots) {
    //collect full aggregations as initial set of candidates
    HashSet<Long> fullAggs = new HashSet<>();
    Hop.resetVisitStatus(roots);
    for (Hop hop : roots)
        rCollectFullAggregates(hop, fullAggs);
    Hop.resetVisitStatus(roots);

    //remove operators with assigned multi-agg plans
    fullAggs.removeIf(p -> memo.contains(p, TemplateType.MAGG));

    //check applicability for further analysis
    //(a single remaining aggregation cannot form a multi-agg group)
    if (fullAggs.size() <= 1)
        return;

    if (LOG.isTraceEnabled()) {
        LOG.trace("Found across-partition ua(RC) aggregations: "
                + Arrays.toString(fullAggs.toArray(new Long[0])));
    }

    //collect information for all candidates 
    //(subsumed aggregations, and inputs to fused operators) 
    List<AggregateInfo> aggInfos = new ArrayList<>();
    for (Long hopID : fullAggs) {
        Hop aggHop = memo.getHopRefs().get(hopID);
        AggregateInfo tmp = new AggregateInfo(aggHop);
        for (int i = 0; i < aggHop.getInput().size(); i++) {
            // for matrix multiply, look through the first input's transpose
            // to its underlying data input — TODO confirm against rExtractAggregateInfo
            Hop c = HopRewriteUtils.isMatrixMultiply(aggHop) && i == 0
                    ? aggHop.getInput().get(0).getInput().get(0)
                    : aggHop.getInput().get(i);
            rExtractAggregateInfo(memo, c, tmp, TemplateType.CELL);
        }
        //fallback: if extraction found no fused inputs, use the direct data inputs
        if (tmp._fusedInputs.isEmpty()) {
            if (HopRewriteUtils.isMatrixMultiply(aggHop)) {
                tmp.addFusedInput(aggHop.getInput().get(0).getInput().get(0).getHopID());
                tmp.addFusedInput(aggHop.getInput().get(1).getHopID());
            } else
                tmp.addFusedInput(aggHop.getInput().get(0).getHopID());
        }
        aggInfos.add(tmp);
    }

    if (LOG.isTraceEnabled()) {
        LOG.trace("Extracted across-partition ua(RC) aggregation info: ");
        for (AggregateInfo info : aggInfos)
            LOG.trace(info);
    }

    //sort aggregations by num dependencies to simplify merging
    //clusters of aggregations with parallel dependencies
    aggInfos = aggInfos.stream().sorted(Comparator.comparing(a -> a._inputAggs.size()))
            .collect(Collectors.toList());

    //greedy grouping of multi-agg candidates
    //NOTE(review): assumes merge(that) folds 'that' into 'current' (or an
    //equivalent combined info) so repeated passes converge — confirm in AggregateInfo
    boolean converged = false;
    while (!converged) {
        AggregateInfo merged = null;
        for (int i = 0; i < aggInfos.size(); i++) {
            AggregateInfo current = aggInfos.get(i);
            for (int j = i + 1; j < aggInfos.size(); j++) {
                AggregateInfo that = aggInfos.get(j);
                if (current.isMergable(that)) {
                    merged = current.merge(that);
                    aggInfos.remove(j);
                    j--;
                }
            }
        }
        //fixpoint: stop once a full pass performs no merge
        converged = (merged == null);
    }

    if (LOG.isTraceEnabled()) {
        LOG.trace("Merged across-partition ua(RC) aggregation info: ");
        for (AggregateInfo info : aggInfos)
            LOG.trace(info);
    }

    //construct and add multiagg template plans (w/ max 3 aggregations)
    for (AggregateInfo info : aggInfos) {
        if (info._aggregates.size() <= 1)
            continue;
        Long[] aggs = info._aggregates.keySet().toArray(new Long[0]);
        MemoTableEntry me = new MemoTableEntry(TemplateType.MAGG, aggs[0], aggs[1],
                (aggs.length > 2) ? aggs[2] : -1, aggs.length);
        for (int i = 0; i < aggs.length; i++) {
            memo.add(memo.getHopRefs().get(aggs[i]), me);
            addBestPlan(aggs[i], me);
            if (LOG.isTraceEnabled())
                LOG.trace("Added multiagg* plan: " + aggs[i] + " " + me);

        }
    }
}