Example usage for java.util Vector toArray

List of usage examples for java.util Vector toArray

Introduction

On this page you can find example usage of java.util.Vector.toArray.

Prototype

@SuppressWarnings("unchecked")
public synchronized <T> T[] toArray(T[] a) 

Source Link

Document

Returns an array containing all of the elements in this Vector in the correct order; the runtime type of the returned array is that of the specified array.

Usage

From source file:com.l2jfree.gameserver.handler.admincommands.AdminSortMultisellItems.java

/**
 * Re-orders the raw multisell config lines so they follow the item order
 * produced first by the price sort and then by the type sort.
 *
 * @param array the raw config lines, each expected to split into exactly 3 fields
 * @return a new array containing only the lines whose item id matched a sorted item
 */
private String[] sortArrayByTypeAndPrice(String[] array) {
    Vector<L2Item> items = getItemVector(array);
    Vector<String> newArray = new Vector<String>();
    items = sortItemsByPrice(items, array);
    items = sortItemsByType(items);

    for (L2Item item : items) {
        if (item == null)
            continue;

        int itemId = item.getItemId();
        for (String finalElement : array) {
            String[] param = finalElement.split(splitting);

            // Only well-formed lines with exactly 3 fields are considered.
            if (param.length != 3)
                continue;

            // MULTISELL_ID_SORT selects which field holds the id to compare on.
            String temp = (MULTISELL_ID_SORT) ? param[0] : param[1];
            // parseInt avoids the needless Integer boxing/unboxing of Integer.valueOf.
            int id = Integer.parseInt(temp);

            if (itemId == id) {
                newArray.add(finalElement);
            }
        }
    }

    return newArray.toArray(new String[newArray.size()]);
}

From source file:uk.ac.babraham.SeqMonk.Pipelines.AntisenseTranscriptionPipeline.java

/**
 * Returns the features of the selected type on chromosome {@code c}, sorted.
 * If the "ignore overlaps" option is set, any feature that overlaps a
 * neighbouring feature on the opposite strand is dropped from the result.
 */
private Feature[] getValidFeatures(Chromosome c) {

    Feature[] features = collection().genome().annotationCollection().getFeaturesForType(c,
            optionsPanel.getSelectedFeatureType());

    Arrays.sort(features);

    if (optionsPanel.ignoreOverlaps()) {
        Vector<Feature> validFeatures = new Vector<Feature>();

        // For each feature, scan backwards and forwards through the sorted
        // list looking for an opposite-strand overlap; the labelled continue
        // skips the feature entirely when one is found.
        FEATURE: for (int f = 0; f < features.length; f++) {
            for (int g = f - 1; g >= 0; g--) {

                // Sorted order can mix chromosomes; stop at a chromosome boundary.
                if (!features[g].chromosomeName().equals(features[f].chromosomeName())) {
                    break;
                }

                if (SequenceRead.overlaps(features[f].location().packedPosition(),
                        features[g].location().packedPosition())) {
                    if (features[f].location().strand() != features[g].location().strand()) {

                        continue FEATURE;
                    }
                } else {

                    // Heuristic cutoff: once an earlier feature ends more than
                    // 1Mb before this one starts, no earlier feature can overlap.
                    if (features[g].location().end() < features[f].location().start() - 1000000) {

                        break;
                    }
                }
            }
            for (int g = f + 1; g < features.length; g++) {

                if (!features[g].chromosomeName().equals(features[f].chromosomeName())) {
                    break;
                }

                if (SequenceRead.overlaps(features[f].location().packedPosition(),
                        features[g].location().packedPosition())) {
                    if (features[f].location().strand() != features[g].location().strand()) {
                        continue FEATURE;
                    }
                } else {
                    // Forward scan is sorted by start, so the first non-overlap ends the search.
                    break;
                }
            }

            validFeatures.add(features[f]);
        }

        features = validFeatures.toArray(new Feature[0]);

    }

    return features;
}

From source file:gda.device.detector.pixium.PixiumDetector.java

/**
 * Builds the list of image file names expected for the current scan point,
 * advancing the image and scan-point counters as a side effect.
 *
 * @return the generated file names, one per image collected at this scan point
 * @throws Exception if the detector controller cannot be queried
 */
private String[] createFileName() throws Exception {
    Vector<String> fileNames = new Vector<String>();
    String dirPrefix = scanSaveFolder.getAbsolutePath() + File.separator;
    // Cannot wait for the EPICS AD full-file-name update before collecting the
    // next image - needed to support NDArray buffering.
    String baseName = controller.getTiff().getFileName().trim();
    if ((int) getCollectionTime() > 1) {
        // Multiple images per scan data point: number them from the running image counter.
        int count = 0;
        while (count < getCollectionTime()) {
            fileNames.add(String.format(controller.getTiff().getFileTemplate(), dirPrefix, baseName,
                    count + imagecounter));
            count++;
        }
        imagecounter = count + imagecounter;
    } else {
        // Single image per scan data point: use the scan point number directly.
        fileNames.add(
                String.format(controller.getTiff().getFileTemplate(), dirPrefix, baseName, scanpointnumber));
    }
    scanpointnumber += 1;
    return fileNames.toArray(new String[] {});
}

From source file:com.beesham.popularmovies.sync.MoviesSyncAdapter.java

/**
 * Parses the movie data from a JSON string and replaces the contents of the
 * movies table with the parsed entries.
 *
 * @param moviesJsonStr raw JSON response containing a "results" array of movies
 * @throws JSONException if the string is not valid JSON or an expected field is missing
 */
private void getMovieDataFromJson(String moviesJsonStr) throws JSONException {
    final String BASE_IMAGE_URL = getContext().getString(R.string.movies_base_image_url);

    JSONObject moviesJSON = new JSONObject(moviesJsonStr);
    JSONArray moviesListJSON = moviesJSON.getJSONArray("results");

    // Pre-sized to the number of movies to avoid re-allocation.
    Vector<ContentValues> contentValuesVector = new Vector<>(moviesListJSON.length());

    for (int i = 0; i < moviesListJSON.length(); i++) {
        JSONObject movieJSON = moviesListJSON.getJSONObject(i);

        String title = movieJSON.getString("title");
        String id = movieJSON.getString("id");
        String overview = movieJSON.getString("overview");
        String posterPath = movieJSON.getString("poster_path");
        double rating = movieJSON.getDouble("vote_average");
        String release_date = movieJSON.getString("release_date");
        // 0 selects trailers, 1 selects reviews in the shared fetch helper.
        String trailers = getTrailersOrReviews(id, 0);
        String reviews = getTrailersOrReviews(id, 1);

        ContentValues movieValues = new ContentValues();

        movieValues.put(MoviesEntry.COLUMN_MOVIE_ID, id);
        movieValues.put(MoviesEntry.COLUMN_MOVIE_TITLE, title);
        movieValues.put(MoviesEntry.COLUMN_MOVIE_SYNOPSIS, overview);
        movieValues.put(MoviesEntry.COLUMN_MOVIE_POSTER, BASE_IMAGE_URL + posterPath);
        movieValues.put(MoviesEntry.COLUMN_MOVIE_USER_RATING, rating);
        movieValues.put(MoviesEntry.COLUMN_MOVIE_RELEASE_DATE, release_date);
        movieValues.put(MoviesEntry.COLUMN_MOVIE_TRAILERS, trailers);
        movieValues.put(MoviesEntry.COLUMN_MOVIE_REVIEWS, reviews);

        contentValuesVector.add(movieValues);
    }

    if (contentValuesVector.size() > 0) {
        ContentValues[] contentValuesArray = new ContentValues[contentValuesVector.size()];
        contentValuesVector.toArray(contentValuesArray);

        // NOTE(review): the table is cleared before the new rows are inserted,
        // so a failed bulkInsert leaves the table empty - confirm this is intended.
        getContext().getContentResolver().delete(MoviesEntry.CONTENT_URI, null, null);
        // The unused "inserted" local that previously captured the bulkInsert
        // return value has been removed.
        getContext().getContentResolver().bulkInsert(MoviesEntry.CONTENT_URI, contentValuesArray);
    }

}

From source file:de.juwimm.cms.remote.UserServiceSpringImpl.java

/**
 * Get all Tasks for the actual User.<br>
 * If a <i>siteRoot</i> is currently logged in, he will get all Tasks.
 *
 * @return An Array of TaskValue Objects
 *
 * @see de.juwimm.cms.remote.UserServiceSpring#getAllTasks()
 */
@Override
protected TaskValue[] handleGetAllTasks() throws Exception {
    UserHbm user = null;
    try {
        user = super.getUserHbmDao().load(AuthenticationHelper.getUserName());
        int siz = 0;
        Iterator it = null;
        // NOTE(review): the "!" here means this branch runs for users who are
        // NOT in the SITE_ROOT role, yet it logs "SiteRoot requested All Tasks"
        // and returns every task - this contradicts the javadoc and debug
        // message and looks like an inverted condition. Confirm before changing.
        if (!getUserHbmDao().isInRole(user, UserRights.SITE_ROOT, user.getActiveSite())) {
            if (log.isDebugEnabled())
                log.debug("SiteRoot requested All Tasks");
            Collection coll = super.getTaskHbmDao().findAll(user.getActiveSite().getSiteId());
            siz = coll.size();
            it = coll.iterator();
            TaskValue[] itarr = new TaskValue[siz];
            if (siz > 0) {
                for (int i = 0; i < siz; i++) {
                    itarr[i] = ((TaskHbm) it.next()).getTaskValue();
                }
            }
            return itarr;
        }
        if (log.isDebugEnabled())
            log.debug("User requested Tasks");
        Vector<TaskValue> vec = new Vector<TaskValue>();
        Collection coll = super.getTaskHbmDao().findAll(user.getActiveSite().getSiteId());
        it = coll.iterator();

        // Keep only tasks addressed to this user directly, or tasks whose
        // receiver role the user does not hold but whose unit the user belongs to.
        while (it.hasNext()) {
            TaskHbm task = (TaskHbm) it.next();
            if (user.equals(task.getReceiver())
                    || (!getUserHbmDao().isInRole(user, task.getReceiverRole(), user.getActiveSite())
                            && (task.getUnit() == null
                                    || getUserHbmDao().isInUnit(task.getUnit().getUnitId(), user)))) {
                vec.add(task.getTaskValue());
            }
        }
        return vec.toArray(new TaskValue[vec.size()]);
    } catch (Exception e) {
        // NOTE(review): wrapping with only e.getMessage() discards the original
        // cause and stack trace - pass e as the cause if UserException allows it.
        throw new UserException(e.getMessage());
    }
}

From source file:com.sos.VirtualFileSystem.SFTP.SOSVfsSFtp.java

/**
 * return a listing of the files in a directory in long format on
 * the remote machine/*  ww w .  jav  a 2 s .c om*/
 * @param pathname on remote machine
 * @return a listing of the contents of a directory on the remote machine
 * @exception Exception
 * @see #nList()
 * @see #nList( String )
 * @see #dir()
 */

public SOSFileList dir(final String pathname) {
    Vector<String> strList = getFilenames(pathname);
    String[] strT = strList.toArray(new String[strList.size()]);
    SOSFileList objFileList = new SOSFileList(strT);
    return objFileList;
}

From source file:com.adito.vfs.webdav.methods.GET.java

/**
 * Parses the HTTP {@code Range} request header (e.g. "bytes=a-b,c-,-d") into
 * an array of byte ranges resolved against the size of the requested resource.
 *
 * @param transaction the WebDAV transaction carrying the request headers
 * @param resource the resource whose content size anchors suffix/open-ended ranges
 * @return the parsed ranges, or {@code null} if no usable Range header was present
 * @throws IOException if the header cannot be parsed
 */
private Range[] processRangeHeader(DAVTransaction transaction, VFSResource resource) throws IOException {

    try {
        if (transaction.getRequest().getHeader("Range") != null) {

            String header = transaction.getRequest().getHeader("Range").toLowerCase();

            if (header.startsWith("bytes=")) {

                // Typed Vector replaces the raw type used previously, removing
                // the unchecked cast at the end.
                Vector<Range> v = new Vector<Range>();

                StringTokenizer tokens = new StringTokenizer(header.substring(6), ",");
                while (tokens.hasMoreTokens()) {
                    String r = tokens.nextToken();

                    if (log.isDebugEnabled())
                        log.debug("Processing byte range " + r);

                    int idx = r.indexOf('-');

                    String startPoint = r.substring(0, idx);
                    String endPoint = r.substring(idx + 1);

                    Range newRange = new Range();

                    if ("".equals(startPoint) && !"".equals(endPoint)) {

                        // Suffix range "-N": the last N bytes of the resource.
                        newRange.count = Long.parseLong(endPoint);
                        newRange.startPosition = resource.getFile().getContent().getSize() - newRange.count;
                        v.add(newRange);

                    } else if (!"".equals(startPoint) && "".equals(endPoint)) {

                        // Open-ended range "N-": from N to the end of the resource.
                        newRange.startPosition = Long.parseLong(startPoint);
                        newRange.count = resource.getFile().getContent().getSize() - newRange.startPosition;
                        v.add(newRange);

                    } else if (!"".equals(startPoint) && !"".equals(endPoint)) {

                        // Bounded range "A-B".
                        // NOTE(review): HTTP byte ranges are inclusive, so count
                        // arguably should be (end - start) + 1 - confirm how
                        // Range.count is consumed before changing.
                        newRange.startPosition = Long.parseLong(startPoint);
                        newRange.count = Long.parseLong(endPoint) - newRange.startPosition;
                        v.add(newRange);

                    } else {
                        log.error("Unsupported byte range element: " + r);
                    }
                }

                if (v.size() > 0) {
                    return v.toArray(new Range[0]);
                }
            }

        }
    } catch (Throwable t) {
        log.error("Failed to process byte range header " + transaction.getRequest().getHeader("Range"), t);
        throw new IOException("Invalid range");
    }

    return null;
}

From source file:uk.ac.babraham.SeqMonk.Pipelines.IntronRegressionPipeline.java

/**
 * Runs the intron regression pipeline: builds one probe per intron of every
 * valid feature, deduplicates them per chromosome, installs them as the probe
 * set, then quantitates each probe in each data set as the slope of a linear
 * regression of binned read counts along the intron.
 */
protected void startPipeline() {

    // We first need to generate probes over all of the features listed in
    // the feature types.  The probes should cover the whole area of the
    // feature regardless of where it splices.

    Vector<Probe> probes = new Vector<Probe>();
    int minDensity = optionsPanel.minDensity();
    int minLength = optionsPanel.minLength();
    double maxPValue = optionsPanel.maxPValue();
    int binSize = optionsPanel.measurementBinSize();

    QuantitationStrandType readFilter = optionsPanel.readFilter();

    Chromosome[] chrs = collection().genome().getAllChromosomes();

    for (int c = 0; c < chrs.length; c++) {
        if (cancel) {
            progressCancelled();
            return;
        }

        Vector<Probe> probesForThisChromosome = new Vector<Probe>();

        progressUpdated("Making probes", c, chrs.length);

        Feature[] features = getValidFeatures(chrs[c]);

        for (int f = 0; f < features.length; f++) {
            if (cancel) {
                progressCancelled();
                return;
            }

            // Now we can iterate through the introns in this feature
            if (!(features[f].location() instanceof SplitLocation))
                continue; // There are no introns here

            Location[] subLocations = ((SplitLocation) features[f].location()).subLocations();

            // TODO: Reverse the subLocations if its a reverse feature            
            for (int intron = 1; intron < subLocations.length; intron++) {

                // An intron spans the gap between consecutive sub-locations (exons).
                int start = subLocations[intron - 1].end();
                int end = subLocations[intron].start();

                if ((end - start) + 1 < minLength) {
                    continue; // This intron is too short.
                }

                // TODO: We could throw away any probes which didn't have enough reads in any feature

                Probe p = new Probe(chrs[c], start, end, features[f].location().strand(),
                        features[f].name() + "_" + intron);
                probesForThisChromosome.add(p);

            }
        }

        // Now we can deduplicate the probes for this chromosome and add them to the main collection
        Probe[] dupProbes = probesForThisChromosome.toArray(new Probe[0]);
        Arrays.sort(dupProbes);

        // Sorted order makes duplicates adjacent, so a single pass removes
        // probes with identical packed positions.
        for (int p = 0; p < dupProbes.length; p++) {
            if (p > 0 && dupProbes[p].packedPosition() == dupProbes[p - 1].packedPosition())
                continue;
            probes.add(dupProbes[p]);
        }

    }

    Probe[] allProbes = probes.toArray(new Probe[0]);

    collection().setProbeSet(new ProbeSet("Features over " + optionsPanel.getSelectedFeatureType(), allProbes));

    // Now we go back through the probes and quantitate them
    for (int p = 0; p < allProbes.length; p++) {

        if (cancel) {
            progressCancelled();
            return;
        }

        // Progress is only reported every 1000 probes to keep UI updates cheap.
        if (p % 1000 == 0) {
            progressUpdated("Quantitated " + p + " out of " + allProbes.length + " probes", p,
                    allProbes.length);
        }

        for (int d = 0; d < data.length; d++) {
            long[] reads = data[d].getReadsForProbe(allProbes[p]);

            // Per-base coverage within the probe for reads passing the strand filter.
            int[] countsPerSite = new int[allProbes[p].length()];

            int usableCounts = 0;

            for (int r = 0; r < reads.length; r++) {
                if (readFilter.useRead(allProbes[p], reads[r])) {
                    ++usableCounts;
                    // Clamp the read's footprint to the probe boundaries.
                    for (int pos = Math.max(0, SequenceRead.start(reads[r]) - allProbes[p].start()); pos <= Math
                            .min(countsPerSite.length - 1,
                                    SequenceRead.end(reads[r]) - allProbes[p].start()); pos++) {
                        ++countsPerSite[pos];
                    }
                }
            }

            // Density threshold is expressed in reads per kilobase of intron.
            if (usableCounts / (allProbes[p].length() / 1000d) >= minDensity) {
                // We're going to do a linear regression rather than a correlation

                // We're analysing in bins so we'll work out the bin counts and
                // add them dynamically to the regression.

                SimpleRegression regression = new SimpleRegression();

                int binCount = 0;
                for (int i = 0; i < countsPerSite.length; i++) {
                    if (i > 0 && i % binSize == 0) {
                        regression.addData(i, binCount);
                        binCount = 0;
                    }

                    binCount += countsPerSite[i];
                }

                // Scaled by 1e6 to bring slopes into a readable numeric range.
                float slope = (float) (regression.getSlope() * 1000000);
                double pValue = regression.getSignificance();

                // Flip the sign for reverse-strand probes so slope direction is
                // relative to the direction of transcription.
                if (allProbes[p].strand() == Location.REVERSE) {
                    slope = 0 - slope;
                }

                if (pValue <= maxPValue) {
                    data[d].setValueForProbe(allProbes[p], slope);
                } else {
                    data[d].setValueForProbe(allProbes[p], Float.NaN);
                }
            } else {
                data[d].setValueForProbe(allProbes[p], Float.NaN);
            }

        }
    }

    // Record a human-readable description of the quantitation parameters.
    StringBuffer quantitationDescription = new StringBuffer();
    quantitationDescription.append("Intron regression pipeline quantitation ");
    quantitationDescription.append(". Directionality was ");
    quantitationDescription.append(optionsPanel.libraryTypeBox.getSelectedItem());
    quantitationDescription.append(". Min intron length was ");
    quantitationDescription.append(minLength);
    quantitationDescription.append(". Min read density was ");
    quantitationDescription.append(minDensity);
    quantitationDescription.append(". Max slope p-value was ");
    quantitationDescription.append(maxPValue);

    collection().probeSet().setCurrentQuantitation(quantitationDescription.toString());

    quantitatonComplete();

}

From source file:edu.umn.cs.spatialHadoop.mapred.CombinedSpatialInputFormat.java

/**
 * Builds the combined list of input splits for all input paths. Paths with a
 * global spatial index are split per indexed partition via range queries;
 * unindexed paths fall back to the superclass's default splitting.
 *
 * @param job the job configuration holding the input paths
 * @param numSplits requested split count (ignored for indexed inputs)
 * @return all splits across every input file
 * @throws IOException if a file system or index lookup fails
 */
@Override
public InputSplit[] getSplits(final JobConf job, int numSplits) throws IOException {

    final Path[] inputFiles = getInputPaths(job);
    final Vector<InputSplit> combinedSplits = new Vector<InputSplit>();
    InputSplit[][] inputSplits = new InputSplit[inputFiles.length][];

    @SuppressWarnings("unchecked")
    GlobalIndex<Partition> gIndexes[] = new GlobalIndex[inputFiles.length];
    for (int i_file = 0; i_file < inputFiles.length; i_file++) {
        FileSystem fs = inputFiles[i_file].getFileSystem(job);
        gIndexes[i_file] = SpatialSite.getGlobalIndex(fs, inputFiles[i_file]);
        if (gIndexes[i_file] != null) {
            final Path currentInputFile = inputFiles[i_file];
            CellInfo[] cellsInfo = SpatialSite.cellsOf(fs, inputFiles[i_file]);
            // For each cell, query the global index and split every matching
            // partition file; the callback appends results to combinedSplits.
            for (CellInfo cellInfo : cellsInfo) {
                gIndexes[i_file].rangeQuery(cellInfo, new ResultCollector<Partition>() {
                    @Override
                    public void collect(Partition p) {
                        try {
                            List<FileSplit> fileSplits = new ArrayList<FileSplit>();
                            Path splitPath = new Path(currentInputFile, p.filename);
                            splitFile(job, splitPath, fileSplits);

                            for (FileSplit fileSplit : fileSplits) {
                                combinedSplits.add(fileSplit);
                            }
                        } catch (IOException e) {
                            // The callback interface cannot throw, so the error
                            // is only printed and the partition is skipped.
                            e.printStackTrace();
                        }
                    }
                });
            }
        } else {
            // No global index: delegate to the default splitter for this path only.
            JobConf temp = new JobConf(job);
            setInputPaths(temp, inputFiles[i_file]);
            inputSplits[i_file] = super.getSplits(temp, 1);
            for (InputSplit currentSplit : inputSplits[i_file]) {
                combinedSplits.add(currentSplit);
            }
        }

    }
    LOG.info("Combined " + combinedSplits.size() + " file splits");

    return combinedSplits.toArray(new InputSplit[combinedSplits.size()]);
}

From source file:de.juwimm.cms.remote.UserServiceSpringImpl.java

/**
 * Returns all users regardings the active site, the given Group and the
 * given unit.<br>/*  w  w  w  . j  a v a 2  s . c o m*/
 * 
 * @param groupId
 *            The Group
 * @param unitId
 *            The Unit
 * @return Returns all UserValue Objects in an Array. Is empty if nobody was
 *         found.
 * 
 * @see de.juwimm.cms.remote.UserServiceSpring#getAllUser4GroupAndUnit(java.lang.Integer,
 *      java.lang.Integer)
 */
@Override
protected UserValue[] handleGetAllUser4GroupAndUnit(Integer groupId, Integer unitId) throws Exception {
    Vector<UserValue> vec = new Vector<UserValue>();
    try {
        UserHbm userMe = super.getUserHbmDao().load(AuthenticationHelper.getUserName());
        if (getUserHbmDao().isInUnit(unitId, userMe)) {
            UnitHbm unit = super.getUnitHbmDao().load(unitId);
            Iterator it = unit.getUsers().iterator();
            while (it.hasNext()) {
                UserHbm user = (UserHbm) it.next();
                if (user.isInGroup(groupId)
                        && !!getUserHbmDao().isInRole(user, UserRights.SITE_ROOT, user.getActiveSite())) {
                    vec.addElement(user.getUserValue());
                }
            }
        }
    } catch (Exception e) {
        throw new UserException(e.getMessage());
    }
    return vec.toArray(new UserValue[0]);
}