Example usage for java.util LinkedList toArray

List of usage examples for java.util LinkedList toArray

Introduction

On this page you can find usage examples of java.util.LinkedList.toArray(T[]).

Prototype

@SuppressWarnings("unchecked")
public <T> T[] toArray(T[] a) 

Source Link

Document

Returns an array containing all of the elements in this list in proper sequence (from first to last element); the runtime type of the returned array is that of the specified array.

Usage

From source file:com.ikanow.aleph2.management_db.controllers.actors.BucketDeletionActor.java

/** Deletes the stored data for a bucket across all registered data services.
 *  Iterates every distinct IDataServiceProvider in the service context and
 *  issues a bucket-deletion request to each provider's data service.
 *  TODO (ALEPH-26): assume default ones for now
 * @param bucket - the bucket to cleanse
 * @param service_context - supplies the list of registered service providers
 * @param delete_bucket - true if the bucket itself is being deleted; in that
 *        case the storage service is skipped, since removing the bucket also
 *        removes its storage path
 * @return a future completing with one status message per data service handled
 */
public static CompletableFuture<Collection<BasicMessageBean>> deleteAllDataStoresForBucket(
        final DataBucketBean bucket, final IServiceContext service_context, boolean delete_bucket) {

    // Currently the only supported data service is the search index
    try {
        // Accumulates one pending deletion result per data service; filled in
        // by the side-effecting forEach below.
        final LinkedList<CompletableFuture<BasicMessageBean>> vals = new LinkedList<>();

        service_context.listServiceProviders().stream().map(t3 -> t3._1().get())
                .filter(s -> IDataServiceProvider.class.isAssignableFrom(s.getClass()))
                .map(s -> (IDataServiceProvider) s).distinct().forEach(service -> {
                    if (!(delete_bucket && IStorageService.class.isAssignableFrom(service.getClass()))) {
                        // if deleting the bucket then don't need to remove the storage path
                        service.getDataService().ifPresent(ds -> vals
                                .add(ds.handleBucketDeletionRequest(bucket, Optional.empty(), delete_bucket)));
                    }
                });

        // Wait for every per-service request to complete, then collect results.
        // join() cannot block here: allOf guarantees all futures are done.
        return CompletableFuture.allOf(vals.toArray(new CompletableFuture[0])).thenApply(__ -> {
            return vals.stream().map(x -> x.join()).collect(Collectors.toList());
        });
    } catch (Throwable t) {
        // Any synchronous failure is converted into a single error bean rather
        // than an exceptionally-completed future, so callers always get a list.
        return CompletableFuture.completedFuture(
                Arrays.asList(ErrorUtils.buildErrorMessage(BucketDeletionActor.class.getSimpleName(),
                        "deleteAllDataStoresForBucket", ErrorUtils.getLongForm("{0}", t))));
    }
}

From source file:de.vanita5.twittnuker.util.net.ssl.AbstractCheckSignatureVerifier.java

/**
 * Extracts the SubjectAlternativeName entries of the requested kind (DNS name
 * or IP address) from an X509Certificate.
 *
 * @param cert the certificate to inspect
 * @param hostname the hostname being verified; an IP-address literal selects
 *                 iPAddress SANs, anything else selects dNSName SANs
 * @return the matching subject-alt names, or {@code null} if there are none
 */
private static String[] getSubjectAlts(final X509Certificate cert, final String hostname) {
    // GeneralName type codes from RFC 5280: dNSName = 2, iPAddress = 7.
    final int subjectType = isIPAddress(hostname) ? 7 : 2;

    final LinkedList<String> subjectAltList = new LinkedList<String>();
    Collection<List<?>> entries = null;
    try {
        entries = cert.getSubjectAlternativeNames();
    } catch (final CertificateParsingException ignored) {
        // Best effort: a certificate whose SAN extension cannot be parsed is
        // treated as having no subject-alt names at all.
    }
    if (entries != null) {
        for (final List<?> entry : entries) {
            // Each SAN entry is a two-element list: [Integer type, String value].
            final int type = ((Integer) entry.get(0)).intValue();
            if (type == subjectType) {
                subjectAltList.add((String) entry.get(1));
            }
        }
    }
    // Contract inherited from callers: null (not an empty array) when no match.
    if (subjectAltList.isEmpty()) {
        return null;
    }
    return subjectAltList.toArray(new String[subjectAltList.size()]);
}

From source file:com.epam.reportportal.apache.http.conn.ssl.AbstractVerifier.java

/**
 * Extracts the array of SubjectAlt DNS or IP names from an X509Certificate.
 * Returns null if there aren't any./*  w ww .j ava 2 s . c o m*/
 *
 * @param cert X509Certificate
 * @param hostname
 * @return Array of SubjectALT DNS or IP names stored in the certificate.
 */
private static String[] getSubjectAlts(final X509Certificate cert, final String hostname) {
    final int subjectType;
    if (isIPAddress(hostname)) {
        subjectType = 7;
    } else {
        subjectType = 2;
    }

    final LinkedList<String> subjectAltList = new LinkedList<String>();
    Collection<List<?>> c = null;
    try {
        c = cert.getSubjectAlternativeNames();
    } catch (final CertificateParsingException cpe) {
    }
    if (c != null) {
        for (final List<?> aC : c) {
            final List<?> list = aC;
            final int type = ((Integer) list.get(0)).intValue();
            if (type == subjectType) {
                final String s = (String) list.get(1);
                subjectAltList.add(s);
            }
        }
    }
    if (!subjectAltList.isEmpty()) {
        final String[] subjectAlts = new String[subjectAltList.size()];
        subjectAltList.toArray(subjectAlts);
        return subjectAlts;
    } else {
        return null;
    }
}

From source file:gr.abiss.calipso.jpasearch.specifications.GenericSpecifications.java

/**
 * Builds a JPA predicate for the given restriction, recursing into any child
 * restrictions and combining everything according to the restriction's
 * junction (OR/AND).
 *
 * @deprecated kept for backwards compatibility
 */
@Deprecated
protected static Predicate getPredicate(final Class clazz, final Restriction searchTerms,
        Root<Persistable> root, CriteriaBuilder cb) {
    final LinkedList<Predicate> collected = new LinkedList<Predicate>();

    // Recurse into nested restrictions first.
    if (!CollectionUtils.isEmpty(searchTerms.getRestrictions())) {
        for (Restriction child : searchTerms.getRestrictions()) {
            collected.add(getPredicate(clazz, child, root, cb));
        }
    }

    // Then add this restriction's own field/value criteria, if any.
    if (StringUtils.isNotBlank(searchTerms.getField())) {
        String propertyName = searchTerms.getField();
        addPredicate(clazz, root, cb, collected,
                searchTerms.getValues().toArray(new String[searchTerms.getValues().size()]), propertyName);
    }

    // Combine under OR or AND depending on the restriction's junction.
    final Predicate[] combined = collected.toArray(new Predicate[collected.size()]);
    return searchTerms.getJunction().equals(Restriction.Junction.OR) ? cb.or(combined) : cb.and(combined);
}

From source file:org.exfio.csyncdroid.resource.LocalCalendar.java

/**
 * Finds every syncable, non-deleted calendar for the given account.
 *
 * @return all matching calendars (possibly an empty array)
 * @throws RemoteException if the content provider query fails
 */
public static LocalCalendar[] findAll(Account account, ContentProviderClient providerClient,
        AccountSettings accountSettings) throws RemoteException {
    // Only calendars that are not deleted and are flagged for event sync.
    @Cleanup
    Cursor cursor = providerClient.query(calendarsURI(account),
            new String[] { Calendars._ID, Calendars.NAME, COLLECTION_COLUMN_CTAG,
                    Calendars.CALENDAR_TIME_ZONE },
            Calendars.DELETED + "=0 AND " + Calendars.SYNC_EVENTS + "=1", null, null);

    LinkedList<LocalCalendar> found = new LinkedList<LocalCalendar>();
    while (cursor != null && cursor.moveToNext()) {
        // Column 0 = _ID, column 3 = CALENDAR_TIME_ZONE (see projection above).
        found.add(new LocalCalendar(account, providerClient, accountSettings, cursor.getInt(0),
                cursor.getString(3)));
    }
    return found.toArray(new LocalCalendar[0]);
}

From source file:gr.abiss.calipso.jpasearch.specifications.GenericSpecifications.java

/**
 * Builds the root JPA predicate for a search-term map, optionally falling
 * back to the "_all" simple-search parameter, and combining the resulting
 * predicates with OR or AND depending on the requested search mode.
 */
protected static Predicate getRootPredicate(final Class clazz, final Map<String, String[]> searchTerms,
        Root<Persistable> root, CriteriaBuilder cb, boolean skipSimpleSearch) {
    final LinkedList<Predicate> predicates = new LinkedList<Predicate>();

    parseSearchTerms(clazz, searchTerms, root, cb, predicates);

    // The simple-search ("_all") fallback only applies when no explicit
    // criteria produced a predicate above.
    if (!skipSimpleSearch && searchTerms.containsKey(SIMPLE_SEARCH_PARAM_NAME) && predicates.size() == 0) {
        Map<String, String[]> simpleSearchTerms = getSimpleSearchTerms(clazz,
                searchTerms.get(SIMPLE_SEARCH_PARAM_NAME));
        parseSearchTerms(clazz, simpleSearchTerms, root, cb, predicates);
    }

    // A disjunction of zero predicates is false, so OR is only used when at
    // least one predicate exists; otherwise fall back to AND.
    final boolean useOr = searchTerms.containsKey(SEARCH_MODE)
            && searchTerms.get(SEARCH_MODE)[0].equalsIgnoreCase(OR)
            && predicates.size() > 0;
    final Predicate[] asArray = predicates.toArray(new Predicate[predicates.size()]);
    return useOr ? cb.or(asArray) : cb.and(asArray);
}

From source file:com.swordlord.gozer.components.wicket.action.button.list.GWListActionToolbar.java

/**
 * Returns the children of the given parent whose classes appear in
 * KNOW_ACTIONS, cast to GActionBase.
 *
 * @param parent the node whose children are filtered; may be null
 * @return matching actions, or an empty list when parent is null
 */
protected List<GActionBase> filterKnownActions(ObjectBase parent) {
    List<GActionBase> actions = new ArrayList<GActionBase>();
    if (parent == null) {
        return actions;
    }

    LinkedList<ObjectBase> children = parent.getChildren();

    // Iterate over an array snapshot of the children, as the original did.
    for (ObjectBase child : children.toArray(new ObjectBase[0])) {
        if (KNOW_ACTIONS.contains(child.getClass())) {
            actions.add((GActionBase) child);
        }
    }

    return actions;
}

From source file:org.apache.hadoop.hbase.util.RegionSplitter.java

/**
 * Scans the given regions and buckets each by how far its split has
 * progressed: verified finished, still waiting for the daughter region to
 * come online ("logical" splitting), or waiting for post-split compaction to
 * remove reference files ("physical" splitting).
 *
 * @param regionList regions to check, as (start key, split key) pairs
 * @param table the table the regions belong to
 * @param splitAlgo used to render row keys (first row / row-to-string)
 * @return the subset of regionList whose splits are verified complete
 * @throws IOException on filesystem or region access failure
 * @throws InterruptedException declared for callers; not thrown directly here
 */
static LinkedList<Pair<byte[], byte[]>> splitScan(LinkedList<Pair<byte[], byte[]>> regionList, HTable table,
        SplitAlgorithm splitAlgo) throws IOException, InterruptedException {
    LinkedList<Pair<byte[], byte[]>> finished = Lists.newLinkedList();
    LinkedList<Pair<byte[], byte[]>> logicalSplitting = Lists.newLinkedList();
    LinkedList<Pair<byte[], byte[]>> physicalSplitting = Lists.newLinkedList();

    // get table info
    Path rootDir = FSUtils.getRootDir(table.getConfiguration());
    Path tableDir = FSUtils.getTableDir(rootDir, table.getName());
    FileSystem fs = tableDir.getFileSystem(table.getConfiguration());
    HTableDescriptor htd = table.getTableDescriptor();

    // clear the cache to forcibly refresh region information
    table.clearRegionCache();

    // for every region that hasn't been verified as a finished split
    for (Pair<byte[], byte[]> region : regionList) {
        byte[] start = region.getFirst();
        byte[] split = region.getSecond();

        // see if the new split daughter region has come online
        try {
            HRegionInfo dri = table.getRegionLocation(split).getRegionInfo();
            if (dri.isOffline() || !Bytes.equals(dri.getStartKey(), split)) {
                logicalSplitting.add(region);
                continue;
            }
        } catch (NoServerForRegionException nsfre) {
            // NSFRE will occur if the old hbase:meta entry has no server assigned
            LOG.info(nsfre);
            logicalSplitting.add(region);
            continue;
        }

        try {
            // when a daughter region is opened, a compaction is triggered
            // wait until compaction completes for both daughter regions
            LinkedList<HRegionInfo> check = Lists.newLinkedList();
            check.add(table.getRegionLocation(start).getRegionInfo());
            check.add(table.getRegionLocation(split).getRegionInfo());
            // iterate an array snapshot so elements can be removed from
            // `check` during the loop without a ConcurrentModificationException
            for (HRegionInfo hri : check.toArray(new HRegionInfo[] {})) {
                byte[] sk = hri.getStartKey();
                if (sk.length == 0)
                    sk = splitAlgo.firstRow();
                String startKey = splitAlgo.rowToStr(sk);

                HRegionFileSystem regionFs = HRegionFileSystem
                        .openRegionFromFileSystem(table.getConfiguration(), fs, tableDir, hri, true);

                // check every Column Family for that region
                boolean refFound = false;
                for (HColumnDescriptor c : htd.getFamilies()) {
                    if ((refFound = regionFs.hasReferences(htd.getTableName().getNameAsString()))) {
                        break;
                    }
                }

                // compaction is completed when all reference files are gone
                if (!refFound) {
                    check.remove(hri);
                }
            }
            // both daughters compacted => this region's split is fully done
            if (check.isEmpty()) {
                finished.add(region);
            } else {
                physicalSplitting.add(region);
            }
        } catch (NoServerForRegionException nsfre) {
            LOG.debug("No Server Exception thrown for: " + splitAlgo.rowToStr(start));
            physicalSplitting.add(region);
            table.clearRegionCache();
        }
    }

    LOG.debug("Split Scan: " + finished.size() + " finished / " + logicalSplitting.size() + " split wait / "
            + physicalSplitting.size() + " reference wait");

    return finished;
}

From source file:org.psidnell.omnifocus.integrationtest.IntegrationTest.java

/**
 * Runs Main with an import of the previously exported test data plus any
 * extra arguments, writing output to a temp file, and optionally diffs that
 * output against the checked-in expected data for {@code name}.
 */
private void runMainAndDiff(final String name, String[] extraArgs, boolean doDiff)
        throws Exception, IOException {
    File tmp = new File(tmpDataDir, name);
    String[] args = { "-import", PREVIOUSLY_EXPORTED_DATA_FILE.getPath(), "-o", tmp.getPath() };

    // Base arguments first, then any test-specific extras.
    LinkedList<String> cliArgs = new LinkedList<>();
    cliArgs.addAll(Arrays.asList(args));
    cliArgs.addAll(Arrays.asList(extraArgs));
    Main.main(cliArgs.toArray(new String[cliArgs.size()]));

    if (doDiff) {
        Diff.diff(new File("src/test/data/" + name), tmp);
    }
}

From source file:com.espertech.esper.epl.core.ResultSetProcessorSimple.java

/**
 * Applies the select-clause to the given events returning the selected events. The number of events stays the
 * same, i.e. this method does not filter it just transforms the result set.
 * <p>/*from   ww  w  . jav  a  2s  .c  o m*/
 * Also applies a having clause.
 * @param exprProcessor - processes each input event and returns output event
 * @param events - input events
 * @param optionalHavingNode - supplies the having-clause expression
 * @param isNewData - indicates whether we are dealing with new data (istream) or old data (rstream)
 * @param isSynthesize - set to true to indicate that synthetic events are required for an iterator result set
 * @param exprEvaluatorContext context for expression evalauation
 * @return output events, one for each input event
 */
protected static EventBean[] getSelectEventsHaving(SelectExprProcessor exprProcessor, EventBean[] events,
        ExprEvaluator optionalHavingNode, boolean isNewData, boolean isSynthesize,
        ExprEvaluatorContext exprEvaluatorContext) {
    if (events == null) {
        return null;
    }

    LinkedList<EventBean> result = new LinkedList<EventBean>();

    EventBean[] eventsPerStream = new EventBean[1];
    for (EventBean theEvent : events) {
        eventsPerStream[0] = theEvent;

        Boolean passesHaving = (Boolean) optionalHavingNode.evaluate(eventsPerStream, isNewData,
                exprEvaluatorContext);
        if ((passesHaving == null) || (!passesHaving)) {
            continue;
        }

        result.add(exprProcessor.process(eventsPerStream, isNewData, isSynthesize, exprEvaluatorContext));
    }

    if (!result.isEmpty()) {
        return result.toArray(new EventBean[result.size()]);
    } else {
        return null;
    }
}