Example usage for java.util Set toArray

List of usage examples for java.util Set toArray

Introduction

On this page you can find example usages of java.util.Set.toArray.

Prototype

<T> T[] toArray(T[] a);

Document

Returns an array containing all of the elements in this set; the runtime type of the returned array is that of the specified array.
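
As a quick, hedged sketch of the prototype above (class and variable names are illustrative, not taken from any of the source files below), the following shows the two common ways toArray is called in the examples that follow: with a pre-sized array and with a zero-length array.

import java.util.LinkedHashSet;
import java.util.Set;

public class SetToArrayExample {
    public static void main(String[] args) {
        Set<String> names = new LinkedHashSet<>();
        names.add("alice");
        names.add("bob");

        // Pre-sized array: the set's elements are copied into it and the same
        // instance is returned; this is the idiom most examples below use.
        String[] sized = names.toArray(new String[names.size()]);

        // Zero-length array: toArray allocates a new String[] of the correct
        // size; the runtime type of the result is taken from the argument.
        String[] fromEmpty = names.toArray(new String[0]);

        System.out.println(sized.length + " " + fromEmpty.length); // prints "2 2"
    }
}

Both forms produce the same result: when the supplied array is too small, toArray allocates a new array whose runtime component type matches the argument.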

Usage

From source file:com.rodaxsoft.mailgun.CampaignManager.java

/**
 * Saves campaign events to a CSV file with the following format:
 * <code>&lt;campaign name&gt;_(&lt;campaign id&gt;)_&lt;timestamp&gt;.csv</code>
 * @param campaignId The campaign ID
 * @throws ContextedException if a processing error occurs
 * @throws IOException if an I/O error occurs
 */
void saveCampaignEventsToCSV(String campaignId) throws ContextedException, IOException {

    Campaign campaign = getCampaign(campaignId);

    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");

    String dateTime = format.format(new Date());
    String fileName;

    if (campaign != null) {

        String name = StringUtils.replace(campaign.getName(), " ", "_");
        fileName = new StringBuilder(name).append("_(").append(campaignId).append(")_").append(dateTime)
                .append(".csv").toString();
    } else {
        fileName = campaignId + "_" + dateTime + ".csv";
    }

    CSVPrinter csvPrinter = null;
    PrintWriter pw = null;
    CSVFormat csvFormat = null;
    try {

        pw = new PrintWriter(fileName);
        final List<Map<String, Object>> events = getEvents(campaignId);

        for (Map<String, Object> map : events) {

            if (null == csvPrinter) {
                final Set<String> keySet = map.keySet();
                int size = keySet.size();
                String[] keys = keySet.toArray(new String[size]);
                csvFormat = CSVFormat.DEFAULT.withHeader(keys);
                csvPrinter = new CSVPrinter(pw, csvFormat);
            }
            //            city   domain   tags   timestamp   region   ip   country   recipient   event   user_vars

            String[] headers = csvFormat.getHeader();
            for (String key : headers) {
                csvPrinter.print(map.get(key));
            }

            csvPrinter.println();
        }

    } finally {

        if (csvPrinter != null) {
            csvPrinter.flush();
        }

        IOUtils.closeQuietly(csvPrinter);
        // Also close the underlying writer in case the printer was never created.
        IOUtils.closeQuietly(pw);
    }

}

From source file:edu.emory.cci.aiw.cvrg.eureka.servlet.JobPatientCountsServlet.java

@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    String jobIdStr = req.getParameter("jobId");
    String key = req.getParameter("propId");
    Long jobId = null;
    if (StringUtils.isNotEmpty(jobIdStr)) {
        try {
            jobId = Long.valueOf(jobIdStr);
            try {
                Statistics jobStats = this.servicesClient.getJobStats(jobId, key);
                Map<String, String> childrenToParents = jobStats.getChildrenToParents();
                Map<String, Integer> counts = jobStats.getCounts();

                Counts results = new Counts();
                List<Count> countResults = new ArrayList<>();

                Set<String> keySet = counts.keySet();
                List<Phenotype> summarizedConcepts = this.servicesClient
                        .getPhenotypes(keySet.toArray(new String[keySet.size()]), true);
                Map<String, Phenotype> keyIdToDE = new HashMap<>();
                for (Phenotype de : summarizedConcepts) {
                    keyIdToDE.put(de.getKey(), de);
                }

                for (Map.Entry<String, Integer> me : counts.entrySet()) {
                    Count count = new Count();
                    count.setKey(me.getKey());
                    count.setParentKeyId(childrenToParents.get(me.getKey()));
                    count.setCount(me.getValue());
                    Phenotype de = keyIdToDE.get(me.getKey());
                    if (de != null) {
                        count.setDisplayName(de.getDisplayName());
                    }
                    countResults.add(count);
                }

                results.setCounts(countResults);

                this.writer.writeValue(resp.getOutputStream(), results);
            } catch (ClientException ex) {
                if (ex.getResponseStatus() == Status.NOT_FOUND) {
                    resp.sendError(HttpServletResponse.SC_PRECONDITION_FAILED, "No job with jobId " + jobIdStr);
                } else {
                    throw new ServletException("Error getting patient counts for job " + jobId, ex);
                }
            }
        } catch (NumberFormatException nfe) {
            resp.sendError(HttpServletResponse.SC_PRECONDITION_FAILED,
                    "jobId parameter must be a long, was " + jobIdStr);
        }
    } else {
        resp.sendError(HttpServletResponse.SC_PRECONDITION_FAILED, "jobId parameter is required");
    }
}

From source file:springfox.documentation.spring.data.rest.CombinedRequestHandler.java

@Override
public PatternsRequestCondition getPatternsCondition() {
    Set<String> patterns = newHashSet(first.getPatternsCondition().getPatterns());
    patterns.addAll(second.getPatternsCondition().getPatterns());
    return new PatternsRequestCondition(patterns.toArray(new String[patterns.size()]));
}

From source file:io.druid.query.search.FragmentSearchQuerySpec.java

@JsonCreator
public FragmentSearchQuerySpec(@JsonProperty("values") List<String> values,
        @JsonProperty("caseSensitive") boolean caseSensitive) {
    this.values = values;
    this.caseSensitive = caseSensitive;
    Set<String> set = new TreeSet<>();
    if (values != null) {
        for (String value : values) {
            set.add(value);
        }
    }
    target = set.toArray(new String[set.size()]);
}

From source file:com.avanza.ymer.YmerMirrorIntegrationTest.java

@Test
public void mbeanInvoke() throws Exception {
    MBeanServer server = ManagementFactory.getPlatformMBeanServer();
    ObjectName nameTemplate = ObjectName.getInstance(
            "se.avanzabank.space.mirror:type=DocumentWriteExceptionHandler,name=documentWriteExceptionHandler");
    Set<ObjectName> names = server.queryNames(nameTemplate, null);
    assertThat(names.size(), is(greaterThan(0)));
    server.invoke(names.toArray(new ObjectName[1])[0], "useCatchesAllHandler", null, null);
}

From source file:gov.nih.nci.caintegrator.application.arraydata.AffymetrixCnPlatformLoader.java

private String[] getSymbols(String[] fields) {
    //
    // This method parses the value from the gene symbol column
    // which is obtained from the manufacturers platform annotation file.
    // This involves breaking the string down into substrings and
    // then finally extracting the gene symbol.
    //
    // An example of this value is as follows:
    // "NM_181714 // intron // 0 // Hs.21945 // LCA5 // 167691 // Leber congenital amaurosis 5
    // /// NM_001122769 // intron // 0 // Hs.21945 // LCA5 // 167691 // Leber congenital amaurosis 5
    //
    // Note in the above string, the top level separator is /// (3 forward slashes)
    // and the second level separator is // (2 forward slashes)
    // A second example of this value is as follows:
    // LCA5 /// LCA5

    // Get the gene symbol field and separate into substrings.
    String[] subField = getAnnotationValue(fields, GENE_SYMBOL_HEADER).split("///");

    // extract the symbols from the array of substrings
    Set<String> symbolsSet = parseSubString(subField);

    // convert to array
    String[] symbols = symbolsSet.toArray(new String[symbolsSet.size()]);

    return symbols;
}

From source file:cc.redpen.server.api.RedPenResourceTest.java

@Override
protected Class<?>[] getClasses() {
    try {
        Set<Class<?>> classes = new ApplicationFileLoader("application").getClasses();
        Class<?>[] classesArray = new Class[classes.size()];
        return classes.toArray(classesArray);
    } catch (FileNotFoundException e) {
        throw new RuntimeException(e);
    }
}

From source file:com.bstek.dorado.data.type.manager.DefaultDataTypeManager.java

/**
 * Filters the given set of matching DataTypes, removing any wrapper whose type
 * (or DataType) is a supertype of another entry, so that only the most
 * specific matches remain.
 */
private void filterMatchingDataTypes(Set<DataTypeWrapper> dtws) {
    DataTypeWrapper[] dtwArray = new DataTypeWrapper[dtws.size()];
    dtws.toArray(dtwArray);

    for (DataTypeWrapper dtw1 : dtwArray) {
        Iterator<DataTypeWrapper> iter = dtws.iterator();
        while (iter.hasNext()) {
            DataTypeWrapper dtw2 = iter.next();
            if (dtw1 != dtw2) {
                if (dtw1.getType().isAssignableFrom(dtw2.getType())) {
                    dtws.remove(dtw1);
                    break;
                }

                if (isChildTypeOf(dtw2.getDataType(), dtw1.getDataType())) {
                    dtws.remove(dtw1);
                    break;
                }
            }
        }
    }
}

From source file:com.acc.controller.OrderController.java

/**
 * Web service for getting current user's order history data.<br>
 * Sample call: https://localhost:9002/rest/v1/mysite/orders?statuses=COMPLETED,CANCELLED&pageSize=5&currentPage=0 <br>
 * This method requires authentication.<br>
 * Method type : <code>GET</code>.<br>
 * Method is restricted for <code>HTTPS</code> channel.
 *
 * @param statuses
 *           - filter for order statuses- optional
 * @param currentPage
 *           - pagination parameter- optional
 * @param pageSize
 *           - {@link PaginationData} parameter - optional
 * @param sort
 *           - sort criterion
 *
 * @return {@link OrderData} as response body.
 */
@Secured("ROLE_CUSTOMERGROUP")
@RequestMapping(method = RequestMethod.GET)
@ResponseBody
public OrderHistoriesData getPagedOrdersForStatuses(@RequestParam(defaultValue = "all") final String statuses,
        @RequestParam(defaultValue = "-1") final int currentPage,
        @RequestParam(defaultValue = "-1") final int pageSize,
        @RequestParam(required = false) final String sort) {
    final PageableData pageableData = resolvePageableData(currentPage, pageSize, sort);

    if (pageableData == null) {
        final SearchPageData<OrderHistoryData> result = new SearchPageData<OrderHistoryData>();
        result.setResults(getAllOrdersForStatuses(statuses));
        return createOrderHistoriesData(result);
    }

    if ("all".equals(statuses)) {
        return createOrderHistoriesData(orderFacade.getPagedOrderHistoryForStatuses(pageableData));
    }
    final Set<OrderStatus> statusSet = extractOrderStatuses(statuses);

    return createOrderHistoriesData(orderFacade.getPagedOrderHistoryForStatuses(pageableData,
            statusSet.toArray(new OrderStatus[statusSet.size()])));
}

From source file:com.uber.hoodie.HoodieReadClient.java

/**
 * Given a bunch of hoodie keys, fetches all the individual records out as a data frame
 *
 * @return a dataframe
 */
public Dataset<Row> read(JavaRDD<HoodieKey> hoodieKeys, int parallelism) throws Exception {

    assertSqlContext();
    JavaPairRDD<HoodieKey, Optional<String>> keyToFileRDD = index.fetchRecordLocation(hoodieKeys, jsc,
            hoodieTable);
    List<String> paths = keyToFileRDD.filter(keyFileTuple -> keyFileTuple._2().isPresent())
            .map(keyFileTuple -> keyFileTuple._2().get()).collect();

    // record locations might be same for multiple keys, so need a unique list
    Set<String> uniquePaths = new HashSet<>(paths);
    Dataset<Row> originalDF = sqlContextOpt.get().read()
            .parquet(uniquePaths.toArray(new String[uniquePaths.size()]));
    StructType schema = originalDF.schema();
    JavaPairRDD<HoodieKey, Row> keyRowRDD = originalDF.javaRDD().mapToPair(row -> {
        HoodieKey key = new HoodieKey(row.getAs(HoodieRecord.RECORD_KEY_METADATA_FIELD),
                row.getAs(HoodieRecord.PARTITION_PATH_METADATA_FIELD));
        return new Tuple2<>(key, row);
    });

    // Now, we need to further filter out, for only rows that match the supplied hoodie keys
    JavaRDD<Row> rowRDD = keyRowRDD.join(keyToFileRDD, parallelism).map(tuple -> tuple._2()._1());

    return sqlContextOpt.get().createDataFrame(rowRDD, schema);
}