Example usage for java.util Vector toArray

List of usage examples for java.util Vector toArray

Introduction

On this page you can find an example usage for java.util Vector toArray.

Prototype

@SuppressWarnings("unchecked")
public synchronized <T> T[] toArray(T[] a) 

Source Link

Document

Returns an array containing all of the elements in this Vector in the correct order; the runtime type of the returned array is that of the specified array.

Usage

From source file:com.bailen.radioOnline.recursos.REJA.java

/**
 * Fetches the current user's song ratings from the radio API, resolves the full
 * song data through jamendo and attaches the rating/favourite flags to each song.
 *
 * @param apiKey API key sent as the "Authorization" header value
 * @return the rated songs, or {@code null} when the response body contains no JSON array
 * @throws IOException when the payload cannot be parsed or the songs cannot be fetched
 */
public Cancion[] getRatings(String apiKey) throws IOException {
    HttpHeaders header = new HttpHeaders();
    header.set("Authorization", apiKey);
    HttpEntity entity = new HttpEntity(header);
    String lista = "";
    HttpEntity<String> response;
    response = new RestTemplate().exchange("http://ceatic.ujaen.es:8075/radioapi/v2/ratings", HttpMethod.GET,
            entity, String.class, lista);

    // Isolate the JSON array: drop everything before the first '[' and re-prepend it.
    String canc = response.getBody();
    StringTokenizer st = new StringTokenizer(canc, "[", true);
    st.nextToken();
    if (!st.hasMoreTokens()) {
        // No '[' in the body: nothing rated.
        return null;
    }
    st.nextToken(); // consume the '[' delimiter itself
    canc = "[" + st.nextToken();

    try {
        ObjectMapper mapper = new ObjectMapper();
        ItemPuntu[] listilla = mapper.readValue(canc, ItemPuntu[].class);
        Vector<Integer> ids = new Vector<>();
        Vector<Cancion> punt = new Vector<>();
        // jamendo only returns 10 songs per call, so call as many times as needed.
        for (int i = 0; i < (listilla.length / 10) + 1; ++i) {
            ids.clear();
            // Even if we send more ids than needed, only the first 10 songs are
            // returned, which keeps the window from overflowing.
            for (int j = i * 10; j < listilla.length; ++j) {
                ids.add(listilla[j].getId());
            }
            Cancion[] listilla1 = jamendo.canciones(ids);
            for (int k = 0; k < listilla1.length; ++k) {
                punt.add(listilla1[k]);
            }
        }

        // Copy rating and favourite flags; assumes jamendo preserves request order
        // so punt.get(i) corresponds to listilla[i] — TODO confirm.
        for (int i = 0; i < punt.size(); ++i) {
            punt.get(i).setRating(listilla[i].getRating());
            punt.get(i).setFav(listilla[i].getFav());
        }

        return punt.toArray(new Cancion[punt.size()]);

    } catch (Exception e) {
        // FIX: keep the original exception as the cause instead of discarding it.
        throw new IOException("no se han recibido canciones", e);
    }

}

From source file:org.fcrepo.server.access.DefaultAccess.java

/**
 * Splits the comma-separated "retainPIDs" parameter of the DOManager module
 * into its individual entries.
 *
 * @return the configured retainPIDs values, in configuration order
 */
private String[] getRetainPIDs() {
    String retainPIDsCSV = convertToCSV(
            getServer().getModule("org.fcrepo.server.storage.DOManager").getParameter("retainPIDs"));
    // FIX: Vector<String> instead of Vector<Object> — StringTokenizer only ever
    // yields Strings, so keep the element type precise and use the typed API.
    Vector<String> retainPIDs = new Vector<String>();
    StringTokenizer st = new StringTokenizer(retainPIDsCSV, ",");
    while (st.hasMoreTokens()) {
        retainPIDs.add(st.nextToken());
    }
    return retainPIDs.toArray(EMPTY_STRING_ARRAY);
}

From source file:edu.cornell.med.icb.geo.tools.MicroarrayTrainEvaluate.java

/**
 * Reads one or more GEO platform files and then a gene-list description file,
 * producing one GeneList per non-comment line of the list file.
 *
 * @param platformFilename comma-separated platform file names; the literal name
 *        "dummy" short-circuits to a single DummyPlatform
 * @param geneListFilename tab-separated gene list file; lines starting with '#' are skipped
 * @return the gene lists parsed from the file, each bound to the platforms read
 * @throws IOException if either file cannot be read
 * @throws SyntaxErrorException if the gene list content is malformed
 */
public GeneList[] readGeneList(final String platformFilename, final String geneListFilename)
        throws IOException, SyntaxErrorException {
    // read platforms:
    final Vector<GEOPlatform> platforms = new Vector<GEOPlatform>();
    final String[] platformFilenames = platformFilename.split(",");
    for (final String pFilename : platformFilenames) {
        platform = new GEOPlatform();
        if ("dummy".equals(pFilename)) {
            platform = new DummyPlatform();
            platforms.add(platform);
            System.out.println("Will proceed with dummy platform.");
            break;
        }
        System.out.print("Reading platform " + pFilename + ".. ");
        System.out.flush();

        platform.read(pFilename);
        System.out.println("done.");
        System.out.flush();
        platforms.add(platform);
    }

    // read gene list info:
    final BufferedReader geneListReader = new BufferedReader(new FileReader(geneListFilename));
    // FIX: close the reader on every path (it was previously leaked).
    try {
        String line;
        final Vector<GeneList> list = new Vector<GeneList>();
        while ((line = geneListReader.readLine()) != null) {
            if (line.startsWith("#")) {
                continue; // comment line
            }
            final String[] tokens = line.split("[\t]");
            assert tokens.length >= 1 : "gene list line must have at least 1 field. Line was :" + line;
            final GeneList geneList = GeneList.createList(tokens);
            geneList.setPlatforms(platforms);
            list.add(geneList);
        }
        return list.toArray(new GeneList[list.size()]);
    } finally {
        geneListReader.close();
    }
}

From source file:org.ow2.aspirerfid.reader.rp.hal.impl.intermecif5.IntermecIF5Controller.java

/**
 * Collects every configuration key that is a String.
 *
 * @return all String-typed parameter names found in the configuration
 * @throws HardwareException when the configuration keys cannot be read
 */
public String[] getAllParameterNames() throws HardwareException, UnsupportedOperationException {

    try {
        Vector<String> names = new Vector<String>();
        // Keys may be of arbitrary type; keep only the Strings.
        for (Iterator it = config.getKeys(); it.hasNext();) {
            Object key = it.next();
            if (key instanceof String) {
                names.add((String) key);
            }
        }
        return names.toArray(new String[names.size()]);
    } catch (Exception e) {
        log.error("getAllParameterNames: Error gettings parameter names", e);
        throw new HardwareException("Error getting parameter names", e);
    }

}

From source file:net.sf.mzmine.modules.peaklistmethods.identification.metamsecorrelate.MetaMSEcorrelateTask.java

/**
 * Computes the feature-shape (intensity profile) correlation between two features.
 *
 * @param f1 first feature
 * @param f2 second feature
 * @param sameRawFile true when both features come from the same raw data file;
 *        correlation across different files is not implemented yet (returns null)
 * @return the feature shape correlation data, or null when the peaks do not
 *         overlap or there are not enough data points for a correlation
 * @throws Exception if aligned scan numbers unexpectedly disagree
 */
public static FeatureShapeCorrelationData corrFeatureShape(Feature f1, Feature f2, boolean sameRawFile)
        throws Exception {
    //Range<Double> rt1 = f1.getRawDataPointsRTRange();
    //Range<Double> rt2 = f2.getRawDataPointsRTRange();
    if (sameRawFile) {
        // scan numbers (not necessarily consecutive: 1,2,3...)
        int[] sn1 = f1.getScanNumbers();
        int[] sn2 = f2.getScanNumbers();
        int offsetI1 = 0;
        int offsetI2 = 0;
        // Align the two scan lists: find the index in the earlier-starting list
        // that matches the other list's first scan number.
        if (sn2[0] > sn1[0]) {
            for (int i = 1; i < sn1.length; i++) {
                if (sn1[i] == sn2[0]) {
                    offsetI1 = i;
                    break;
                }
            }
            // no matching scan found: peaks are not overlapping
            if (offsetI1 == 0)
                return null;
        }
        if (sn2[0] < sn1[0]) {
            for (int i = 1; i < sn2.length; i++) {
                if (sn1[0] == sn2[i]) {
                    offsetI2 = i;
                    break;
                }
            }
            // no matching scan found: peaks are not overlapping
            if (offsetI2 == 0)
                return null;
        }
        // only correlate the intersecting region: max = length of the overlap
        int max = 0;
        if (sn1.length - offsetI1 <= sn2.length - offsetI2)
            max = sn1.length - offsetI1;
        if (sn1.length - offsetI1 > sn2.length - offsetI2)
            max = sn2.length - offsetI2;
        // NOTE(review): 'max' is already the overlap length, so subtracting the
        // offsets again here looks suspicious — confirm whether the intended
        // condition is simply max > minCorrelatedDataPoints.
        if (max - offsetI1 > minCorrelatedDataPoints && max - offsetI2 > minCorrelatedDataPoints) {
            RawDataFile raw = f1.getDataFile();
            SimpleRegression reg = new SimpleRegression();
            // track min and max intensity of f1 (the x variable)
            double maxX = 0;
            double minX = Double.POSITIVE_INFINITY;
            Vector<Double> I1 = new Vector<Double>();
            Vector<Double> I2 = new Vector<Double>();
            // add all raw (not smoothed) data point pairs above the noise threshold
            for (int i = 0; i < max; i++) {
                if (sn1[i + offsetI1] != sn2[i + offsetI2])
                    throw new Exception("Scans are not the same for peak shape corr");
                double val1 = f1.getDataPoint(sn1[i + offsetI1]).getIntensity();
                double val2 = f2.getDataPoint(sn2[i + offsetI2]).getIntensity();
                if (val1 >= noiseLevelShapeCorr && val2 >= noiseLevelShapeCorr) {
                    reg.addData(val1, val2);
                    if (val1 < minX)
                        minX = val1;
                    if (val1 > maxX)
                        maxX = val1;
                    I1.add(val1);
                    I2.add(val2);
                }
            }
            // enough points survived the threshold: return the regression (pearson r)
            if (reg.getN() >= minCorrelatedDataPoints) {
                Double[][] data = new Double[][] { I1.toArray(new Double[I1.size()]),
                        I2.toArray(new Double[I2.size()]) };
                return new FeatureShapeCorrelationData(reg, data, minX, maxX);
            }
        }
    } else {
        // TODO if different raw file search for same rt
        // impute rt/I values if between 2 data points
    }
    return null;
}

From source file:com.example.android.myargmenuplanner.data.LoadMenu.java

@Override
protected String[] doInBackground(String... params) {

    Uri mUri = MenuEntry.CONTENT_URI;

    Cursor mCursor = mContext.getContentResolver().query(mUri, null, null, null, null);

    if (mCursor.getCount() == 0) {

        int shift = 0;
        DateFormat df = new SimpleDateFormat("yyyy-MM-dd");
        Calendar cal = Calendar.getInstance();
        String sDate = df.format(cal.getTime());
        int dayofweek = cal.get(Calendar.DAY_OF_WEEK);

        Log.i(LOG_TAG, "Init Date: " + date);
        Vector<ContentValues> cVVector = new Vector<ContentValues>(14 - dayofweek);

        for (int i = dayofweek; i <= 14; i++) {

            ContentValues values = new ContentValues();

            values.put(MenuEntry.COLUMN_DATE, sDate);
            values.put(MenuEntry.COLUMN_LUNCH, "Empty");
            values.put(MenuEntry.COLUMN_ID_LUNCH, 0);
            values.put(MenuEntry.COLUMN_DINNER, "Empty");
            values.put(MenuEntry.COLUMN_ID_DINNER, 0);

            cVVector.add(values);/*from w ww. j  av a2  s  . c om*/

            cal.add(Calendar.DATE, 1);
            sDate = df.format(cal.getTime());
            //            Log.i(LOG_TAG, "Day of the week: "+cal.get(Calendar.DAY_OF_WEEK));
            //            Log.i(LOG_TAG, "Date: "+date);

        }

        int inserted = 0;

        //            // add to database
        Log.i(LOG_TAG, "Creando registros en base de datos. Tabla Menu ");

        if (cVVector.size() > 0) {
            ContentValues[] cvArray = new ContentValues[cVVector.size()];
            cVVector.toArray(cvArray);

            inserted = mContext.getContentResolver().bulkInsert(MenuEntry.CONTENT_URI, cvArray);
            Log.i(LOG_TAG, "Registros nuevos creados en Tabla Menu: " + inserted);
        }

    } else { //ya tengo registros, tengo que fijarme las fechas

        DateFormat df = new SimpleDateFormat("yyyy-MM-dd");
        Calendar cal = Calendar.getInstance();
        String dateNow = df.format(cal.getTime());
        int dayofweek = cal.get(Calendar.DAY_OF_WEEK);
        String date = "";
        String week = "";
        while (mCursor.moveToNext()) {

            date = mCursor.getString(1);

            if (dateNow.equals(date)) {

            }
        }

    }

    return null;
}

From source file:gda.scan.ScanDataPoint.java

/**
 * Returns the values held by this ScanDataPoint of Scannables, Monitors and Detectors.
 * /*from   w w  w .jav  a  2 s . co  m*/
 * @return an array of Double of length getMonitorHeader().size() + getPositionHeader().size() +
 *         getDetectorHeader().size() if the conversion of a field to Double is not possible then the element of the
 *         array will be null
 * @throws IllegalArgumentException
 *             if the fields convert to too few values
 * @throws IndexOutOfBoundsException
 *             if the fields convert to too many values
 */
@Override
public Double[] getAllValuesAsDoubles() throws IllegalArgumentException, IndexOutOfBoundsException {
    if (allValuesAsDoubles == null) {
        Double[] scannablePosAsDoubles = getPositionsAsDoubles();
        Double[] detectorDataAsDoubles = getDetectorDataAsDoubles();

        Vector<Double> output = new Vector<Double>();
        output.addAll(Arrays.asList(scannablePosAsDoubles));
        output.addAll(Arrays.asList(detectorDataAsDoubles));
        allValuesAsDoubles = output.toArray(new Double[] {});
    }
    return allValuesAsDoubles;
}

From source file:org.mrgeo.rasterops.GeoTiffExporter.java

/**
 * Writes a RenderedImage to the output stream as a tiled, WGS84-georeferenced GeoTIFF.
 *
 * @param image image to export
 * @param bounds geographic bounds used for the tie points and pixel scale
 * @param os destination stream
 * @param replaceNan when true, NaN pixels in float/double images are replaced with nodata
 * @param xmp optional XMP metadata written as TIFF tag 700, or null
 * @param nodata nodata value recorded in the NULL_TAG field (skipped for NaN float/double)
 * @throws IOException if encoding fails
 */
public static void export(final RenderedImage image, final Bounds bounds, final OutputStream os,
        final boolean replaceNan, final String xmp, final Number nodata) throws IOException {
    OpImageRegistrar.registerMrGeoOps();

    final TIFFEncodeParam param = new TIFFEncodeParam();
    // The version of GDAL that Legion is using requires a tile size > 1
    param.setTileSize(image.getTileWidth(), image.getTileHeight());
    param.setWriteTiled(true);

    // if the image only has 1 pixel, the value of this pixel changes after compressing (especially
    // if this pixel is no data value. e.g -9999 changes to -8192 when read the image back).
    // So don't do compress if the image has only 1 pixel.
    if (image.getWidth() > 1 && image.getHeight() > 1) {
        // Deflate lossless compression (also known as "Zip-in-TIFF")
        param.setCompression(TIFFEncodeParam.COMPRESSION_DEFLATE);
        param.setDeflateLevel(Deflater.BEST_COMPRESSION);
    }

    final GeoTIFFDirectory dir = new GeoTIFFDirectory();

    // GTModelTypeGeoKey : using geographic coordinate system.
    dir.addGeoKey(new XTIFFField(1024, XTIFFField.TIFF_SHORT, 1, new char[] { 2 }));
    // GTRasterTypeGeoKey : pixel is point
    dir.addGeoKey(new XTIFFField(1025, XTIFFField.TIFF_SHORT, 1, new char[] { 1 }));
    // GeographicTypeGeoKey : 4326 WGS84
    dir.addGeoKey(new XTIFFField(2048, XTIFFField.TIFF_SHORT, 1, new char[] { 4326 }));
    dir.addGeoKey(new XTIFFField(2049, XTIFFField.TIFF_ASCII, 7, new String[] { "WGS 84" }));
    // GeogAngularUnitsGeoKey : Angular Degree
    dir.addGeoKey(new XTIFFField(2054, XTIFFField.TIFF_SHORT, 1, new char[] { 9102 }));
    if (xmp != null) {
        final byte[] b = xmp.getBytes("UTF8");
        dir.addField(new XTIFFField(700, XTIFFField.TIFF_BYTE, b.length, b));
    }
    dir.getFields();

    // Tie the raster's top-left corner (0,0) to the bounds' north-west corner.
    final double[] tiePoints = new double[6];
    tiePoints[0] = 0.0;
    tiePoints[1] = 0.0;
    tiePoints[2] = 0.0;
    tiePoints[3] = bounds.getMinX();
    tiePoints[4] = bounds.getMaxY();
    tiePoints[5] = 0.0;
    dir.setTiepoints(tiePoints);
    final double[] pixelScale = new double[3];
    pixelScale[0] = bounds.getWidth() / image.getWidth();
    pixelScale[1] = bounds.getHeight() / image.getHeight();
    pixelScale[2] = 0;
    dir.setPixelScale(pixelScale);

    final Vector<TIFFField> fields = toTiffField(dir.getFields());

    RenderedImage output = image;

    final String[] nullValues = new String[1];
    switch (image.getSampleModel().getDataType()) {
    case DataBuffer.TYPE_DOUBLE:
        nullValues[0] = Double.toString(nodata.doubleValue());
        if (replaceNan) {
            output = ReplaceNanDescriptor.create(image, nodata.doubleValue());
        }
        // Tiff exporter doesn't handle doubles. Yuck!
        output = ConvertToFloatDescriptor.create(output);

        // Double.NaN (our default nodata on ingest) should not be written out as nodata on export
        // (i.e. GeoTiffs imported without NODATA metadata field should be exported as such)
        if (!Double.isNaN(nodata.doubleValue())) {
            fields.add(new TIFFField(NULL_TAG, XTIFFField.TIFF_ASCII, 1, nullValues));
        }
        break;
    case DataBuffer.TYPE_FLOAT:
        // FIX: use Float.toString — Double.toString(floatValue()) widens the float and
        // prints its double expansion (e.g. 0.1f -> "0.10000000149011612").
        nullValues[0] = Float.toString(nodata.floatValue());
        if (replaceNan) {
            output = ReplaceNanDescriptor.create(image, nodata.floatValue());
        }
        // Float.NaN (our default nodata on ingest) should not be written out as nodata on export
        // (i.e. GeoTiffs imported without NODATA metadata field should be exported as such)
        if (!Float.isNaN(nodata.floatValue())) {
            fields.add(new TIFFField(NULL_TAG, XTIFFField.TIFF_ASCII, 1, nullValues));
        }
        break;
    case DataBuffer.TYPE_INT:
    case DataBuffer.TYPE_USHORT:
    case DataBuffer.TYPE_SHORT:
    case DataBuffer.TYPE_BYTE:
        nullValues[0] = Integer.toString(nodata.intValue());
        fields.add(new TIFFField(NULL_TAG, XTIFFField.TIFF_ASCII, 1, nullValues));
        break;
    }

    param.setExtraFields(fields.toArray(new TIFFField[0]));

    EncodeDescriptor.create(output, os, "TIFF", param, null);
}

From source file:org.sakaiproject.poll.service.impl.PollListManagerImpl.java

/**
 * Splits a string on a literal separator (no regex semantics, unlike String.split).
 * A trailing empty field is dropped; leading and interior empty fields are kept.
 *
 * @param source the string to split
 * @param splitter the literal separator to split on
 * @return the pieces of source between occurrences of splitter
 */
protected String[] split(String source, String splitter) {
    Vector<String> parts = new Vector<String>();
    int start = 0;
    // walk each occurrence of the separator, collecting the text before it
    int hit = source.indexOf(splitter);
    while (hit != -1) {
        parts.add(source.substring(start, hit));
        start = hit + splitter.length();
        hit = source.indexOf(splitter, start);
    }
    // keep whatever follows the last separator, unless it is empty
    if (start < source.length()) {
        parts.add(source.substring(start, source.length()));
    }

    return parts.toArray(new String[parts.size()]);

}

From source file:de.juwimm.cms.authorization.remote.AuthorizationServiceSpringImpl.java

/**
 * Returns the units visible to the authenticated user on their active site.
 * Site roots see every unit of the site; other users only their assigned units.
 *
 * @see de.juwimm.cms.authorization.remote.AuthorizationServiceSpring#getUnits()
 */
@Override
protected UnitValue[] handleGetUnits() throws Exception {
    Vector<UnitValue> units = new Vector<UnitValue>();
    try {
        if (log.isDebugEnabled())
            log.debug("begin getUnits");
        UserHbm user = super.getUserHbmDao().load(AuthenticationHelper.getUserName());
        // Choose the unit source depending on the user's role.
        Iterator iterator;
        if (getUserHbmDao().isInRole(user, UserRights.SITE_ROOT, user.getActiveSite())) {
            iterator = super.getUnitHbmDao().findAll(user.getActiveSite().getSiteId()).iterator();
        } else {
            iterator = super.getUserHbmDao().getUnits4ActiveSite(user).iterator();
        }
        while (iterator.hasNext()) {
            UnitHbm unit = (UnitHbm) iterator.next();
            units.add(getUnitHbmDao().getDao(unit));
        }
        if (log.isDebugEnabled())
            log.debug("end getUnits");
    } catch (Exception e) {
        // NOTE(review): this wrapping drops the original stack trace; pass e as the
        // cause if UserException has a (String, Throwable) constructor.
        throw new UserException(e.getMessage());
    }
    return units.toArray(new UnitValue[0]);
}