Example usage for java.util Vector size

List of usage examples for java.util Vector size

Introduction

On this page you can find example usage of java.util.Vector.size().

Prototype

public synchronized int size() 

Document

Returns the number of components in this vector.
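
As a quick orientation before the project examples below, here is a minimal, self-contained sketch (written for this page, not taken from any of the projects) showing the two most common uses of size(): as an emptiness check and as a loop bound.

import java.util.Vector;

public class VectorSizeDemo {
    public static void main(String[] args) {
        Vector<String> values = new Vector<String>();
        values.addElement("alpha");
        values.addElement("beta");

        // size() returns the current number of components in the vector
        System.out.println("components: " + values.size());

        // Common pattern: guard against an empty vector, then loop by index
        if (values.size() > 0) {
            for (int i = 0; i < values.size(); i++) {
                System.out.println(i + ": " + values.elementAt(i));
            }
        }
    }
}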

Usage

From source file:DatabaseBrowser.java

protected void populateCatalogBox() {
    try {
        DatabaseMetaData dmd = connection.getMetaData();
        ResultSet rset = dmd.getCatalogs();
        Vector values = new Vector();
        while (rset.next()) {
            values.addElement(rset.getString(1));
        }
        rset.close();
        catalogBox.setModel(new DefaultComboBoxModel(values));
        catalogBox.setSelectedItem(connection.getCatalog());
        catalogBox.setEnabled(values.size() > 0);
    } catch (Exception e) {
        catalogBox.setEnabled(false);
    }
}

From source file:gov.nih.nci.evs.reportwriter.utils.SimpleSearchUtils.java

public ResolvedConceptReferencesIteratorWrapper search(Vector<String> schemes, Vector<String> versions,
        String matchText, int searchOption, String algorithm) throws LBException {

    if (schemes == null || versions == null)
        return null;
    if (schemes.size() != versions.size())
        return null;
    if (schemes.size() == 0)
        return null;
    if (matchText == null)
        return null;
    if (searchOption != BY_CODE && searchOption != BY_NAME)
        return null;
    if (searchOption != BY_CODE && algorithm == null)
        return null;

    LexBIGService lbSvc = null;

    SearchExtension searchExtension = null;
    try {
        lbSvc = new RemoteServerUtil().createLexBIGService();

        if (lbSvc == null) {
            return null;
        }
        searchExtension = (SearchExtension) lbSvc.getGenericExtension("SearchExtension");
    } catch (Exception e) {
        _logger.warn("SearchExtension is not available.");
        return null;
    }

    Set<CodingSchemeReference> includes = new HashSet<CodingSchemeReference>();

    for (int i = 0; i < schemes.size(); i++) {
        String scheme = (String) schemes.elementAt(i);
        String version = (String) versions.elementAt(i);
        CodingSchemeReference ref = new CodingSchemeReference();
        ref.setCodingScheme(scheme);

        if (version != null) {
            CodingSchemeVersionOrTag versionOrTag = new CodingSchemeVersionOrTag();
            versionOrTag.setVersion(version);
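            // Note: this versionOrTag instance is never attached to ref, so the
            // requested version does not actually constrain the search below.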
        }
        includes.add(ref);
    }

    ResolvedConceptReferencesIterator iterator = null;
    try {
        iterator = searchExtension.search(matchText, includes, converToMatchAlgorithm(searchOption, algorithm));
        printNumberOfMatches(iterator);

    } catch (Exception ex) {
        ex.printStackTrace();
    }
    if (iterator != null) {
        return new ResolvedConceptReferencesIteratorWrapper(iterator);
    }
    return null;
}

From source file:com.xmlcalabash.io.ReadableDocument.java

private void readDoc(StepContext stepContext) {
    runtime.getTracer().debug(null, stepContext, -1, this, null, "    DOCU > LOADING...");

    c_init.close(stepContext.curChannel);

    if (uri == null) {
        documents.addChannel(stepContext.curChannel);
        runtime.getTracer().debug(null, stepContext, -1, this, null, "    DOCU > NOTHING");
    } else {
        if (doc == null) {
            try {
                // What if this is a directory?
                String fn = uri;
                if (fn.startsWith("file:")) {
                    fn = fn.substring(5);
                    if (fn.startsWith("///")) {
                        fn = fn.substring(2);
                    }
                }
                File f = new File(fn);
                if (f.isDirectory()) {
                    if (pattern == null) {
                        pattern = Pattern.compile("^.*\\.xml$");
                    }
                    for (File file : f.listFiles(new RegexFileFilter(pattern))) {
                        doc = runtime.parse(file.getCanonicalPath(), base);
                        documents.newPipedDocument(stepContext.curChannel, doc);
                    }
                } else {
                    doc = null;
                    boolean json = false;
                    try {
                        doc = runtime.parse(uri, base);
                    } catch (XProcException xe) {
                        if (runtime.transparentJSON()) {
                            try {
                                URI baseURI = new URI(base);
                                URL url = baseURI.resolve(uri).toURL();
                                URLConnection conn = url.openConnection();
                                InputStreamReader reader = new InputStreamReader(conn.getInputStream());
                                JSONTokener jt = new JSONTokener(reader);
                                doc = JSONtoXML.convert(runtime.getProcessor(), jt, runtime.jsonFlavor());
                                documents.newPipedDocument(stepContext.curChannel, doc);
                                json = true;
                            } catch (Exception e) {
                                throw xe;
                            }
                        } else {
                            throw xe;
                        }
                    }
                    if (!json) {
                        if (fn.contains("#")) {
                            int pos = fn.indexOf("#");
                            String ptr = fn.substring(pos + 1);
                            if (ptr.matches("^[\\w]+$")) {
                                ptr = "element(" + ptr + ")";
                            }
                            XPointer xptr = new XPointer(ptr);
                            Vector<XdmNode> nodes = xptr.selectNodes(runtime, doc);
                            if (nodes.size() == 1) {
                                doc = nodes.get(0);
                            } else if (nodes.size() != 0) {
                                throw new XProcException(node, "XPointer matches more than one node!?");
                            }
                        }
                    }
                }
            } catch (Exception except) {
                throw XProcException.dynamicError(11, node, except, "Could not read: " + uri);
            }
        }

        documents.newPipedDocument(stepContext.curChannel, doc);
        runtime.getTracer().debug(null, stepContext, -1, this, null, "    DOCU > LOADED");
    }

    // close documents        
    documents.close(stepContext.curChannel);
}

From source file:de.betterform.xml.xforms.model.constraints.MainDependencyGraph.java

/**
 * Adds a single bind's ref node to the Main Graph
 * called by MainDependencyGraph.buildBindGraph()
 */
private void addReferredNodesToGraph(BetterFormXPathContext relativeContext, Node instanceNode,
        String expression, short property, Set references, String customMIP) throws XFormsException {
    //creates a new vertex for this node, or returns it in case it already exists
    //RKU
    Vertex vertex = this.addVertex(relativeContext, instanceNode, expression, property, customMIP);
    boolean hadVertex = vertex.wasAlreadyInGraph;
    vertex.wasAlreadyInGraph = false;

    // Analyze the XPath expression (e.g. 'calculate'). Read the nodeset refns
    // (the nodes this XPath references).
    String xpath = vertex.getXPathExpression();
    //        String xpath = expression;
    vertex.setXpathExpression(expression);

    if ((xpath == null) || (xpath.length() == 0)) {
        // bind without xpath, remove vertex
        if (!hadVertex) {
            this.removeVertex(vertex);
        }

        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("addReferredNodesToGraph: ignoring vertex " + vertex + " without xpath");
        }

        return;
    }

    //Analyze the XPath expression to determine the referenced data items
    Vector refns = this.getXPathRefNodes(relativeContext, xpath, references);

    if (refns == null) {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("addReferredNodesToGraph: ignoring vertex " + vertex + " without references");
        }

        return;
    }

    if (refns.size() == 0) {
        // this is a calculated value that does not depend on anything, so calculate it now
        vertex.compute();
    }

    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("addReferredNodesToGraph: processing vertex " + vertex + " with " + refns.size()
                + " references");
    }

    Enumeration enumeration = refns.elements();

    while (enumeration.hasMoreElements()) {
        Node referencedNode = (Node) enumeration.nextElement();

        // pre-build vertex
        Vertex refVertex = this.addVertex(null, referencedNode, null, Vertex.CALCULATE_VERTEX, null);
        this.addEdge(refVertex, vertex);
    }
}

From source file:javazoom.jlgui.player.amp.playlist.BasePlaylist.java

/**
 * Shuffles items in the playlist randomly
 */
public void shuffle() {
    int size = _playlist.size();
    if (size < 2) {
        return;
    }
    Vector v = _playlist;
    _playlist = new Vector(size);
    while ((size = v.size()) > 0) {
        _playlist.addElement(v.remove((int) (Math.random() * size)));
    }
    begin();
}

From source file:com.clustercontrol.poller.impl.WbemPollerImpl.java

private boolean checkCIMData(CIMValue value) {
    if (value == null) {
        m_log.info("checkCIMData : value is null");
        return false;
    }
    if (value.getType() == null) {
        // Do not dereference value.getType() here; it is null in this branch.
        m_log.info("checkCIMData : value.getType is null");
        return false;
    }
    int type = value.getType().getType();
    if (type == CIMDataType.UINT8) {
    } else if (type == CIMDataType.UINT16) {
    } else if (type == CIMDataType.UINT32) {
    } else if (type == CIMDataType.UINT64) {
    } else if (type == CIMDataType.STRING) {
    } else if (type == CIMDataType.STRING_ARRAY) {
        @SuppressWarnings("unchecked")
        Vector<String> ret = (Vector<String>) value.getValue();
        if (ret.size() == 0) {
            m_log.info("checkCIMData : CIMValue has fault. : ip=" + m_ipAddress);
            return false;
        }
    }
    return true;
}

From source file:xiaofans.threadsample.RSSPullService.java

/**
 * In an IntentService, onHandleIntent is run on a background thread.  As it
 * runs, it broadcasts its current status using the LocalBroadcastManager.
 * @param workIntent The Intent that starts the IntentService. This Intent contains the
 * URL of the web site from which the RSS parser gets data.
 */
@Override
protected void onHandleIntent(Intent workIntent) {
    // Gets a URL to read from the incoming Intent's "data" value
    String localUrlString = workIntent.getDataString();

    // Creates a projection to use in querying the modification date table in the provider.
    final String[] dateProjection = new String[] { DataProviderContract.ROW_ID,
            DataProviderContract.DATA_DATE_COLUMN };

    // A URL that's local to this method
    URL localURL;

    // A cursor that's local to this method.
    Cursor cursor = null;

    /*
     * A block that tries to connect to the Picasa featured picture URL passed as the "data"
     * value in the incoming Intent. The block throws exceptions (see the end of the block).
     */
    try {

        // Convert the incoming data string to a URL.
        localURL = new URL(localUrlString);

        /*
         * Tries to open a connection to the URL. If an IO error occurs, this throws an
         * IOException
         */
        URLConnection localURLConnection = localURL.openConnection();

        // If the connection is an HTTP connection, continue
        if ((localURLConnection instanceof HttpURLConnection)) {

            // Broadcasts an Intent indicating that processing has started.
            mBroadcaster.broadcastIntentWithState(Constants.STATE_ACTION_STARTED);

            // Casts the connection to a HTTP connection
            HttpURLConnection localHttpURLConnection = (HttpURLConnection) localURLConnection;

            // Sets the user agent for this request.
            localHttpURLConnection.setRequestProperty("User-Agent", Constants.USER_AGENT);

            /*
             * Queries the content provider to see if this URL was read previously, and when.
             * The content provider throws an exception if the URI is invalid.
             */
            cursor = getContentResolver().query(DataProviderContract.DATE_TABLE_CONTENTURI, dateProjection,
                    null, null, null);

            // Flag to indicate that new metadata was retrieved
            boolean newMetadataRetrieved;

            /*
             * Tests to see if the table contains a modification date for the URL
             */
            if (null != cursor && cursor.moveToFirst()) {

                // Find the URL's last modified date in the content provider
                long storedModifiedDate = cursor
                        .getLong(cursor.getColumnIndex(DataProviderContract.DATA_DATE_COLUMN));

                /*
                 * If the modified date isn't 0, sets another request property to ensure that
                 * data is only downloaded if it has changed since the last recorded
                 * modification date. Formats the date according to the RFC1123 format.
                 */
                if (0 != storedModifiedDate) {
                    localHttpURLConnection.setRequestProperty("If-Modified-Since",
                            org.apache.http.impl.cookie.DateUtils.formatDate(new Date(storedModifiedDate),
                                    org.apache.http.impl.cookie.DateUtils.PATTERN_RFC1123));
                }

                // Marks that new metadata does not need to be retrieved
                newMetadataRetrieved = false;

            } else {

                /*
                 * No modification date was found for the URL, so new metadata has to be
                 * retrieved.
                 */
                newMetadataRetrieved = true;

            }

            // Reports that the service is about to connect to the RSS feed
            mBroadcaster.broadcastIntentWithState(Constants.STATE_ACTION_CONNECTING);

            // Gets a response code from the RSS server
            int responseCode = localHttpURLConnection.getResponseCode();

            switch (responseCode) {

            // If the response is OK
            case HttpStatus.SC_OK:

                // Gets the last modified data for the URL
                long lastModifiedDate = localHttpURLConnection.getLastModified();

                // Reports that the service is parsing
                mBroadcaster.broadcastIntentWithState(Constants.STATE_ACTION_PARSING);

                /*
                 * Instantiates a pull parser and uses it to parse XML from the RSS feed.
                 * The mBroadcaster argument sends a broadcaster utility object to the
                 * parser.
                 */
                RSSPullParser localPicasaPullParser = new RSSPullParser();

                localPicasaPullParser.parseXml(localURLConnection.getInputStream(), mBroadcaster);

                // Reports that the service is now writing data to the content provider.
                mBroadcaster.broadcastIntentWithState(Constants.STATE_ACTION_WRITING);

                // Gets image data from the parser
                Vector<ContentValues> imageValues = localPicasaPullParser.getImages();

                // Stores the number of images
                int imageVectorSize = imageValues.size();

                // Creates one ContentValues for each image
                ContentValues[] imageValuesArray = new ContentValues[imageVectorSize];

                imageValuesArray = imageValues.toArray(imageValuesArray);

                /*
                 * Stores the image data in the content provider. The content provider
                 * throws an exception if the URI is invalid.
                 */
                getContentResolver().bulkInsert(DataProviderContract.PICTUREURL_TABLE_CONTENTURI,
                        imageValuesArray);

                // Creates another ContentValues for storing date information
                ContentValues dateValues = new ContentValues();

                // Adds the URL's last modified date to the ContentValues
                dateValues.put(DataProviderContract.DATA_DATE_COLUMN, lastModifiedDate);

                if (newMetadataRetrieved) {

                    // No previous metadata existed, so insert the data
                    getContentResolver().insert(DataProviderContract.DATE_TABLE_CONTENTURI, dateValues);

                } else {

                    // Previous metadata existed, so update it.
                    getContentResolver().update(DataProviderContract.DATE_TABLE_CONTENTURI, dateValues,
                            DataProviderContract.ROW_ID + "="
                                    + cursor.getString(cursor.getColumnIndex(DataProviderContract.ROW_ID)),
                            null);
                }
                break;

            }

            // Reports that the feed retrieval is complete.
            mBroadcaster.broadcastIntentWithState(Constants.STATE_ACTION_COMPLETE);
        }

        // Handles possible exceptions
    } catch (MalformedURLException localMalformedURLException) {

        localMalformedURLException.printStackTrace();

    } catch (IOException localIOException) {

        localIOException.printStackTrace();

    } catch (XmlPullParserException localXmlPullParserException) {

        localXmlPullParserException.printStackTrace();

    } finally {

        // If an exception occurred, close the cursor to prevent memory leaks.
        if (null != cursor) {
            cursor.close();
        }
    }
}

From source file:dao.DirSearchQuery.java

/**
 * This method lists all the results for the search text from directories.
 * @param conn the connection
 * @param stext the search text
 * @return HashSet the set of directories that match the search text
 * @throws BaseDaoException - when an error occurs
 **/
public HashSet run(Connection conn, String stext) throws BaseDaoException {

    if ((RegexStrUtil.isNull(stext) || conn == null)) {
        return null;
    }
    ResultSet rs = null;

    StringBuffer sb = new StringBuffer(
            "select distinct directoryid, dirname, LEFT(dirdesc, 160) as info, hits, creationdate from directory where ");

    ArrayList columns = new ArrayList();
    columns.add("dirdesc");
    columns.add("dirname");
    columns.add("keywords");
    sb.append(sqlSearch.getConstraint(columns, stext));
    sb.append(" order by hits DESC");
    logger.info("search = " + sb.toString());

    try {
        PreparedStatement stmt = conn.prepareStatement(sb.toString());
        rs = stmt.executeQuery();

        Vector columnNames = null;
        Directory directory = null;
        HashSet pendingSet = new HashSet();

        if (rs != null) {
            columnNames = dbutils.getColumnNames(rs);
        } else {
            return null;
        }

        while (rs.next()) {
            directory = (Directory) eop.newObject(DbConstants.DIRECTORY);
            for (int j = 0; j < columnNames.size(); j++) {
                if (((String) (columnNames.elementAt(j))).equalsIgnoreCase("creationdate")) {
                    try {
                        directory.setValue("creationdate",
                                GlobalConst.dncalendar.getDisplayDate(rs.getTimestamp("creationdate")));
                    } catch (ParseException e) {
                        throw new BaseDaoException(
                                "could not parse the date for creationdate in DirSearchQuery()"
                                        + rs.getTimestamp("creationdate"),
                                e);
                    }
                } else {
                    directory.setValue((String) columnNames.elementAt(j),
                            (String) rs.getString((String) columnNames.elementAt(j)));
                }
            }
            pendingSet.add(directory);
        }
        return pendingSet;
    } catch (Exception e) {
        throw new BaseDaoException("Error occured while executing search in directory run query ", e);
    }
}

From source file:no.met.jtimeseries.netcdf.NetcdfChartProvider.java

public void getCsv(PrintStream out, Iterable<String> variables) throws ParseException, IOException {

    Vector<NumberPhenomenon> data = getWantedPhenomena(variables);

    // header
    out.print("# Time");
    for (NumberPhenomenon p : data)
        out.print(",\t" + p.getPhenomenonName() + " (" + p.getPhenomenonUnit() + ")");
    out.println();

    TreeMap<Date, Double[]> displayData = new TreeMap<Date, Double[]>();
    for (int i = 0; i < data.size(); i++) {
        for (NumberValueItem atom : data.get(i)) {
            Double[] d = displayData.get(atom.getTimeFrom());
            if (d == null) {
                d = new Double[data.size()];
                displayData.put(atom.getTimeFrom(), d);
            }
            d[i] = atom.getValue();
        }
    }

    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
    for (Entry<Date, Double[]> element : displayData.entrySet()) {
        out.print(format.format(element.getKey()));
        Double[] d = element.getValue();
        for (int i = 0; i < d.length; i++)
            out.print(",\t" + d[i]);
        out.println();
    }
}

From source file:edu.umn.cs.spatialHadoop.nasa.MultiHDFPlot.java

public static boolean multiplot(Path[] input, Path output, OperationsParams params)
        throws IOException, InterruptedException, ClassNotFoundException, ParseException {
    String timeRange = params.get("time");
    final Date dateFrom, dateTo;
    final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy.MM.dd");
    try {
        String[] parts = timeRange.split("\\.\\.");
        dateFrom = dateFormat.parse(parts[0]);
        dateTo = dateFormat.parse(parts[1]);
    } catch (ArrayIndexOutOfBoundsException e) {
        System.err.println("Use the seperator two periods '..' to seperate from and to dates");
        return false; // To avoid an error that causes dateFrom to be uninitialized
    } catch (ParseException e) {
        System.err.println("Illegal date format in " + timeRange);
        return false;
    }
    // Number of frames to combine in each image
    int combine = params.getInt("combine", 1);
    // Retrieve all matching input directories based on date range
    Vector<Path> matchingPathsV = new Vector<Path>();
    for (Path inputFile : input) {
        FileSystem inFs = inputFile.getFileSystem(params);
        FileStatus[] matchingDirs = inFs.listStatus(inputFile, new PathFilter() {
            @Override
            public boolean accept(Path p) {
                String dirName = p.getName();
                try {
                    Date date = dateFormat.parse(dirName);
                    return date.compareTo(dateFrom) >= 0 && date.compareTo(dateTo) <= 0;
                } catch (ParseException e) {
                    LOG.warn("Cannot parse directory name: " + dirName);
                    return false;
                }
            }
        });
        for (FileStatus matchingDir : matchingDirs)
            matchingPathsV.add(new Path(matchingDir.getPath(), "*.hdf"));
    }
    if (matchingPathsV.isEmpty()) {
        LOG.warn("No matching directories to given input");
        return false;
    }

    Path[] matchingPaths = matchingPathsV.toArray(new Path[matchingPathsV.size()]);
    Arrays.sort(matchingPaths);

    // Clear all paths to ensure we set our own paths for each job
    params.clearAllPaths();

    // Create a water mask if we need to recover holes on write
    if (params.get("recover", "none").equals("write")) {
        // Recover images on write requires a water mask image to be generated first
        OperationsParams wmParams = new OperationsParams(params);
        wmParams.setBoolean("background", false);
        Path wmImage = new Path(output, new Path("water_mask"));
        HDFPlot.generateWaterMask(wmImage, wmParams);
        params.set(HDFPlot.PREPROCESSED_WATERMARK, wmImage.toString());
    }
    // Start a job for each path
    int imageWidth = -1;
    int imageHeight = -1;
    boolean overwrite = params.getBoolean("overwrite", false);
    boolean pyramid = params.getBoolean("pyramid", false);
    FileSystem outFs = output.getFileSystem(params);
    Vector<Job> jobs = new Vector<Job>();
    boolean background = params.getBoolean("background", false);
    Rectangle mbr = new Rectangle(-180, -90, 180, 90);
    for (int i = 0; i < matchingPaths.length; i += combine) {
        Path[] inputPaths = new Path[Math.min(combine, matchingPaths.length - i)];
        System.arraycopy(matchingPaths, i, inputPaths, 0, inputPaths.length);
        Path outputPath = new Path(output, inputPaths[0].getParent().getName() + (pyramid ? "" : ".png"));
        if (overwrite || !outFs.exists(outputPath)) {
            // Need to plot
            Job rj = HDFPlot.plotHeatMap(inputPaths, outputPath, params);
            if (imageHeight == -1 || imageWidth == -1) {
                if (rj != null) {
                    imageHeight = rj.getConfiguration().getInt("height", 1000);
                    imageWidth = rj.getConfiguration().getInt("width", 1000);
                    mbr = (Rectangle) OperationsParams.getShape(rj.getConfiguration(), "mbr");
                } else {
                    imageHeight = params.getInt("height", 1000);
                    imageWidth = params.getInt("width", 1000);
                    mbr = (Rectangle) OperationsParams.getShape(params, "mbr");
                }
            }
            if (background && rj != null)
                jobs.add(rj);
        }
    }
    // Wait until all jobs are done
    while (!jobs.isEmpty()) {
        Job firstJob = jobs.firstElement();
        firstJob.waitForCompletion(false);
        if (!firstJob.isSuccessful()) {
            System.err.println("Error running job " + firstJob.getJobID());
            System.err.println("Killing all remaining jobs");
            for (int j = 1; j < jobs.size(); j++)
                jobs.get(j).killJob();
            throw new RuntimeException("Error running job " + firstJob.getJobID());
        }
        jobs.remove(0);
    }

    // Draw the scale in the output path if needed
    String scalerange = params.get("scalerange");
    if (scalerange != null) {
        String[] parts = scalerange.split("\\.\\.");
        double min = Double.parseDouble(parts[0]);
        double max = Double.parseDouble(parts[1]);
        String scale = params.get("scale", "none").toLowerCase();
        if (scale.equals("vertical")) {
            MultiHDFPlot.drawVerticalScale(new Path(output, "scale.png"), min, max, 64, imageHeight, params);
        } else if (scale.equals("horizontal")) {
            MultiHDFPlot.drawHorizontalScale(new Path(output, "scale.png"), min, max, imageWidth, 64, params);
        }
    }
    // Add the KML file
    createKML(outFs, output, mbr, params);
    return true;
}