List of usage examples for java.util.Vector.toArray
@SuppressWarnings("unchecked") public synchronized <T> T[] toArray(T[] a)
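A minimal, self-contained sketch of what this overload does, based only on documented java.util.Vector behaviour (the class and variable names below are illustrative, not taken from any of the examples that follow): if the supplied array is large enough, toArray fills it and returns that same reference; otherwise it allocates and returns a new array with the runtime type of the argument. That is why both the presized-array idiom and the zero-length-array idiom seen in the examples below work.

import java.util.Vector;

public class VectorToArrayDemo {
    public static void main(String[] args) {
        Vector<String> words = new Vector<String>();
        words.add("alpha");
        words.add("beta");

        // Undersized (zero-length) array: toArray allocates and returns a new String[2].
        String[] copy = words.toArray(new String[0]);
        System.out.println(copy.length); // 2

        // Exactly sized array: toArray fills the passed-in array and returns the same reference.
        String[] presized = new String[words.size()];
        String[] returned = words.toArray(presized);
        System.out.println(returned == presized); // true

        // Oversized array: the element just past the last copied element is set to null.
        String[] oversized = words.toArray(new String[4]);
        System.out.println(oversized[2] == null); // true
    }
}

Both idioms appear below: new Shape[vshapes.size()] and new String[plugins.size()] pass a presized array that is filled in place, while new String[0] and new DefaultDocumentProvider[] {} simply let toArray allocate the correctly sized result.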
From source file:com.delcyon.capo.Configuration.java
public DefaultDocumentProvider[] getDefaultDocumentProviders() {
    Vector<DefaultDocumentProvider> defaultDocumentProviderVector = new Vector<DefaultDocumentProvider>();
    Set<String> defaultDocumentProviderSet = CapoApplication.getAnnotationMap()
            .get(DefaultDocumentProvider.class.getCanonicalName());
    for (String className : defaultDocumentProviderSet) {
        try {
            defaultDocumentProviderVector
                    .add(Class.forName(className).getAnnotation(DefaultDocumentProvider.class));
        } catch (ClassNotFoundException classNotFoundException) {
            CapoApplication.logger.log(Level.WARNING, "Error getting document providers",
                    classNotFoundException);
        }
    }
    return defaultDocumentProviderVector.toArray(new DefaultDocumentProvider[] {});
}
From source file:com.skywomantechnology.app.guildviewer.sync.GuildViewerSyncAdapter.java
/**
 * Converts the ContentValues vector to an array, bulk inserts the records,
 * then clears out the vector.
 *
 * @param cVVector records to insert
 * @return int count of how many were actually inserted
 */
private int insertNews(Vector<ContentValues> cVVector) {
    int insertCount = 0;
    if (cVVector == null || cVVector.isEmpty())
        return insertCount;
    int numRecordsToInsert = cVVector.size();
    if (numRecordsToInsert > 0) {
        // convert to an array for the bulk insert to work with
        ContentValues[] cvArray = new ContentValues[cVVector.size()];
        cVVector.toArray(cvArray);
        // inserts into the storage
        insertCount = mContext.getContentResolver().bulkInsert(NewsEntry.CONTENT_URI, cvArray);
        // clear out the loaded records so there are not duplicates
        cVVector.clear();
    }
    return insertCount;
}
From source file:li.zeitgeist.api.ZeitgeistApi.java
/**
 * Update the tags of an item.
 *
 * This adds or removes taggings from an item that is specified
 * by ID. Tags is a comma-separated list of tags; each tag
 * can be prefixed by + or - to specify whether to add or delete
 * the tag. Note that + is optional, since adding is the default.
 *
 * @param id
 * @param tags
 * @return the updated item.
 * @throws ZeitgeistError
 */
public Item update(int id, String tags) throws ZeitgeistError {
    Vector<String> addTags = new Vector<String>();
    Vector<String> delTags = new Vector<String>();
    String[] tagsArray = tags.split(",");
    for (String tag : tagsArray) {
        tag = tag.trim();
        if (tag.charAt(0) == '-') {
            tag = tag.substring(1);
            delTags.add(tag);
        } else {
            if (tag.charAt(0) == '+') {
                tag = tag.substring(1);
            }
            addTags.add(tag);
        }
    }
    return this.update(id, addTags.toArray(new String[0]), delTags.toArray(new String[0]));
}
From source file:edu.umn.cs.spatialHadoop.mapred.SpatialRecordReader.java
/**
 * Reads all shapes left in the current block in one shot. This function
 * runs a loop where it keeps reading shapes by calling the method
 * {@link #nextShape(Shape)} until one of the following conditions happens:
 * 1. The whole file is read. No more records to read.
 * 2. The number of parsed records reaches the threshold defined by the
 *    configuration parameter spatialHadoop.mapred.MaxShapesPerRead.
 *    To disable this check, set the configuration parameter to -1.
 * 3. The total size of parsed data from the file reaches the threshold
 *    defined by the configuration parameter spatialHadoop.mapred.MaxBytesPerRead.
 *    To disable this check, set the configuration parameter to -1.
 *
 * @param shapes
 * @return
 * @throws IOException
 */
protected boolean nextShapes(ArrayWritable shapes) throws IOException {
    // Prepare a vector that will hold all objects in this block
    Vector<Shape> vshapes = new Vector<Shape>();
    try {
        Shape stockObject = (Shape) shapes.getValueClass().newInstance();
        // Reached the end of this split
        if (getFilePosition() >= end)
            return false;
        long initialReadPos = getPos();
        long readBytes = 0;
        // Read all shapes in this block
        while ((maxShapesInOneRead <= 0 || vshapes.size() < maxShapesInOneRead)
                && (maxBytesInOneRead <= 0 || readBytes < maxBytesInOneRead)
                && nextShape(stockObject)) {
            vshapes.add(stockObject.clone());
            readBytes = getPos() - initialReadPos;
        }
        // Store them in the return value
        shapes.set(vshapes.toArray(new Shape[vshapes.size()]));
        return !vshapes.isEmpty();
    } catch (InstantiationException e1) {
        e1.printStackTrace();
    } catch (IllegalAccessException e1) {
        e1.printStackTrace();
    } catch (OutOfMemoryError e) {
        LOG.error("Error reading shapes. Stopped with " + vshapes.size() + " shapes");
        throw e;
    }
    return false;
}
From source file:com.ricemap.spateDB.operations.Repartition.java
public static <S extends Shape> CellInfo[] packInPrisms(FileSystem fs, Path[] files,
        FileSystem outFileSystem, Path outFile, long blocksize, S stockShape) throws IOException {
    final Vector<Point3d> sample = new Vector<Point3d>();

    double sample_ratio = outFileSystem.getConf().getFloat(SpatialSite.SAMPLE_RATIO, 0.01f);
    long sample_size = outFileSystem.getConf().getLong(SpatialSite.SAMPLE_SIZE, 100 * 1024 * 1024);

    // 24 is the estimated size in bytes needed to store each sample point
    long sample_count = sample_size / 24;

    LOG.info("Reading a sample of " + (int) Math.round(sample_ratio * 100) + "%");
    ResultCollector<Point3d> resultCollector = new ResultCollector<Point3d>() {
        @Override
        public void collect(Point3d value) {
            sample.add(value.clone());
        }
    };
    Sampler.sampleWithRatio(fs, files, sample_ratio, sample_count, System.currentTimeMillis(),
            resultCollector, stockShape, new Point3d());
    LOG.info("Finished reading a sample of size: " + sample.size() + " records");
    long inFileSize = Sampler.sizeOfLastProcessedFile;

    // Compute an approximate MBR to determine the desired number of rows and columns
    Prism approxMBR = new Prism(Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE,
            -Double.MAX_VALUE, -Double.MAX_VALUE, -Double.MAX_VALUE);
    for (Point3d pt : sample) {
        approxMBR.expand(pt);
    }
    GridInfo gridInfo = new GridInfo(approxMBR.t1, approxMBR.x1, approxMBR.y1,
            approxMBR.t2, approxMBR.x2, approxMBR.y2);
    gridInfo.calculateCellDimensions(Math.max(1, (int) ((inFileSize + blocksize / 2) / blocksize)));
    gridInfo.set(-Double.MAX_VALUE, -Double.MAX_VALUE, -Double.MAX_VALUE,
            Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE);
    Prism[] Prisms = RTree.packInPrisms(gridInfo, sample.toArray(new Point3d[sample.size()]));
    CellInfo[] cellsInfo = new CellInfo[Prisms.length];
    for (int i = 0; i < Prisms.length; i++)
        cellsInfo[i] = new CellInfo(i + 1, Prisms[i]);
    return cellsInfo;
}
From source file:DOMProcessor.java
/**
 * Returns the DOM elements with the given name that are the children of the
 * given node. This is a non-recursive method that only looks for immediate
 * children. Array will be 0 length if none found.
 *
 * @param name Element name to search for.
 * @param node Node from which to examine children.
 * @return Child nodes or empty Node array if none found.
 */
public Node[] getNodeElements(String name, Node node) {
    // Only consider document or element nodes.
    if ((node.getNodeType() != Node.DOCUMENT_NODE) && (node.getNodeType() != Node.ELEMENT_NODE)) {
        return new Node[0];
    }
    Vector<Node> matchedChildren = new Vector<Node>();
    NodeList children = node.getChildNodes();
    for (int i = 0; i < children.getLength(); i++) {
        Node child = children.item(i);
        // Only consider element child nodes.
        if (child.getNodeType() == Node.ELEMENT_NODE) {
            if (child.getNodeName().equalsIgnoreCase(name)) {
                matchedChildren.add(child);
            }
        }
    }
    Node[] nodes = new Node[matchedChildren.size()];
    matchedChildren.toArray(nodes);
    return nodes;
}
From source file:corelyzer.ui.CorelyzerApp.java
private static String[] initPlugins() {
    Vector<String> plugins = new Vector<String>();
    addAllPlugins(new File("plugins"), plugins);
    addAllPlugins(new File("../plugins"), plugins);
    return plugins.toArray(new String[plugins.size()]);
}
From source file:plugspud.PluginManager.java
/**
 * Initialise the plugin manager.
 *
 * @param context context
 * @throws PluginException on any errors
 */
public void init(PluginHostContext context) throws PluginException {
    this.context = context;
    pluginMap = new HashMap<String, PluginManager.PluginWrapper>();
    plugins = new Vector<PluginWrapper>();
    startedPlugins = new Vector<PluginWrapper>();
    // Create the plugin directory if it doesn't exist
    pluginDir = context.getPluginDirectory();
    if (pluginDir == null) {
        context.log(PluginHostContext.LOG_ERROR, "No plugin directory has been provided by the plugin host.");
    } else {
        if (!pluginDir.exists() && !pluginDir.mkdirs())
            throw new PluginException("Could not create plugin directory " + pluginDir.getAbsolutePath());
        // First remove any plugin jars that are no longer required
        File removeFile = new File(pluginDir, "ros.list");
        if (removeFile.exists() && removeFile.canRead()) {
            InputStream rin = null;
            try {
                rin = new FileInputStream(removeFile);
                BufferedReader reader = new BufferedReader(new InputStreamReader(rin));
                String line = null;
                while ((line = reader.readLine()) != null) {
                    File z = new File(line);
                    context.log(PluginHostContext.LOG_INFORMATION,
                            "Deleting plugin library " + z.getAbsolutePath());
                    if (!z.delete())
                        context.log(PluginHostContext.LOG_ERROR, "Failed to delete " + z.getAbsolutePath());
                }
            } catch (IOException ioe) {
                context.log(PluginHostContext.LOG_ERROR,
                        "Failed to read remove-on-startup list file " + removeFile.getAbsolutePath());
            } finally {
                if (rin != null) {
                    try {
                        rin.close();
                    } catch (IOException ioe) {
                    }
                }
                if (!removeFile.delete()) {
                    context.log(PluginHostContext.LOG_ERROR,
                            "Failed to remove remove-on-startup list file " + removeFile.getAbsolutePath()
                                    + ". Further errors may appear.");
                }
            }
        }
        if (!pluginDir.exists() && !pluginDir.mkdirs())
            throw new PluginException("Could not create plugin directory " + pluginDir.getAbsolutePath());
        // Next unzip any newly installed plugin archives
        File[] newPlugins = pluginDir.listFiles(new FileFilter() {
            public boolean accept(File f) {
                return f.getName().toLowerCase().endsWith(".tmp");
            }
        });
        for (int i = 0; i < newPlugins.length; i++) {
            try {
                unzip(newPlugins[i], pluginDir);
                if (!newPlugins[i].delete()) {
                    context.log(PluginHostContext.LOG_ERROR,
                            "New plugin archive " + newPlugins[i].getAbsolutePath() + " could not be "
                                    + "deleted. Until this file is removed, this plugin "
                                    + "will continue to be installed every time "
                                    + context.getPluginHostName() + " starts up.");
                }
            } catch (Exception e) {
                context.log(PluginHostContext.LOG_ERROR, e);
                throw new PluginException("Failed to unzip newly installed plugin "
                        + newPlugins[i].getAbsolutePath() + ". "
                        + (e.getMessage() == null ? "<null>" : e.getMessage()));
            }
        }
    }
    try {
        // Create a classloader for all of the plugin jars
        Vector<URL> v = new Vector<URL>();
        if (pluginDir != null) {
            URL u = pluginDir.toURL();
            v.addElement(u);
            context.log(PluginHostContext.LOG_INFORMATION, "Added plugin directory " + u.toExternalForm());
            findJars(pluginDir, v);
            URL[] urls = new URL[v.size()];
            v.copyInto(urls);
        }
        URL[] urls = (URL[]) v.toArray(new URL[v.size()]);
        classLoader = new URLClassLoader(urls,
                parentClassLoader == null ? getClass().getClassLoader() : parentClassLoader);
        ((URLClassLoader) classLoader).getURLs();
        // Add the standard plugins
        URL url = context.getStandardPluginsResource();
        if (url != null) {
            try {
                loadPlugins(url, classLoader, true);
            } catch (PluginException pe) {
                context.log(PluginHostContext.LOG_ERROR, pe);
            }
        } else {
            // Add the plugins
            for (Enumeration<URL> e = classLoader.getResources("plugins.properties"); e.hasMoreElements();) {
                URL resource = e.nextElement();
                context.log(PluginHostContext.LOG_DEBUG,
                        "Found plugins.properties in " + resource.toExternalForm());
                loadPlugins(resource, classLoader, false);
            }
        }
        Collections.sort(plugins);
        checkDependencies();
    } catch (Throwable t) {
        throw new PluginException("Plugin manager failed to initialise.", t);
    }
    initialised = true;
}
From source file:hudson.plugins.dimensionsscm.DimensionsSCM.java
@DataBoundConstructor
public DimensionsSCM(String project, String[] folders, String workarea, boolean canJobDelete,
        boolean canJobForce, boolean canJobRevert, String jobUserName, String jobPasswd,
        String jobServer, String jobDatabase, boolean canJobUpdate, String jobTimeZone,
        String jobWebUrl, String directory, String permissions, boolean canJobExpand,
        boolean canJobNoMetadata) {
    // Check the folders specified have data specified
    if (folders != null) {
        Logger.Debug("Folders are populated");
        Vector<String> x = new Vector<String>();
        for (int t = 0; t < folders.length; t++) {
            if (StringUtils.isNotEmpty(folders[t]))
                x.add(folders[t]);
        }
        this.folders = (String[]) x.toArray(new String[1]);
    } else {
        if (directory != null)
            this.folders[0] = directory;
    }

    // If nothing specified, then default to '/'
    if (this.folders.length < 2) {
        if (this.folders[0] == null || this.folders[0].length() < 1)
            this.folders[0] = "/";
    }

    // Copying arguments to fields
    this.project = (Util.fixEmptyAndTrim(project) == null ? "${JOB_NAME}" : project);
    this.directory = (Util.fixEmptyAndTrim(directory) == null ? null : directory);
    this.permissions = (Util.fixEmptyAndTrim(permissions) == null ? "DEFAULT" : permissions);
    this.jobServer = (Util.fixEmptyAndTrim(jobServer) == null ? getDescriptor().getServer() : jobServer);
    this.jobUserName = (Util.fixEmptyAndTrim(jobUserName) == null ? getDescriptor().getUserName() : jobUserName);
    this.jobDatabase = (Util.fixEmptyAndTrim(jobDatabase) == null ? getDescriptor().getDatabase() : jobDatabase);
    String passwd = (Util.fixEmptyAndTrim(jobPasswd) == null ? getDescriptor().getPasswd() : jobPasswd);
    this.jobPasswd = Scrambler.scramble(passwd);

    if ((Util.fixEmptyAndTrim(jobServer)) == null) {
        this.canJobUpdate = getDescriptor().isCanUpdate();
    } else {
        this.canJobUpdate = canJobUpdate;
    }

    this.canJobDelete = canJobDelete;
    this.canJobForce = canJobForce;
    this.canJobRevert = canJobRevert;
    this.canJobExpand = canJobExpand;
    this.canJobNoMetadata = canJobNoMetadata;

    this.jobTimeZone = (Util.fixEmptyAndTrim(jobTimeZone) == null ? getDescriptor().getTimeZone() : jobTimeZone);
    this.jobWebUrl = (Util.fixEmptyAndTrim(jobWebUrl) == null ? getDescriptor().getWebUrl() : jobWebUrl);

    String dmS = this.jobServer + "-" + this.jobUserName + ":" + this.jobDatabase;
    Logger.Debug("Starting job for project '" + this.project + "' ('" + this.folders.length + "')"
            + ", connecting to " + dmS);
}
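A brief aside on the x.toArray(new String[1]) call above, grounded only in the documented behaviour of Vector.toArray(T[] a) (the class and variable names below are illustrative): because the supplied array has length 1, an empty vector still yields a one-element array whose single slot is null, which is exactly what the later this.folders[0] == null check relies on. A zero-length array would instead produce a zero-length result.

import java.util.Vector;

public class ToArraySizeOneDemo {
    public static void main(String[] args) {
        Vector<String> empty = new Vector<String>();

        // With a zero-length array, an empty vector produces a zero-length result...
        String[] zero = empty.toArray(new String[0]);
        System.out.println(zero.length); // 0

        // ...but with new String[1] the supplied array is returned as-is:
        // it has length 1 and its first element is set to null.
        String[] one = empty.toArray(new String[1]);
        System.out.println(one.length + " " + (one[0] == null)); // 1 true
    }
}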
From source file:com.tlongdev.bktf.interactor.TlongdevPriceListInteractor.java
private int parseJson(InputStream inputStream) throws IOException {
    // Create a parser from the input stream for fast parsing and low impact on memory
    JsonFactory factory = new JsonFactory();
    JsonParser parser = factory.createParser(inputStream);
    Vector<ContentValues> cVVector = new Vector<>();
    int retVal = 0;
    int count = 0;

    // Not a JSON object if it doesn't start with START_OBJECT
    if (parser.nextToken() != JsonToken.START_OBJECT) {
        return -1;
    }

    while (parser.nextToken() != JsonToken.END_OBJECT) {
        String name = parser.getCurrentName();
        parser.nextToken();

        switch (name) {
        case "success":
            if (parser.getIntValue() == 0) {
                retVal = 1;
            }
            break;
        case "message":
            errorMessage = parser.getText();
            break;
        case "count":
            count = parser.getIntValue();
            break;
        case "prices":
            while (parser.nextToken() != JsonToken.END_ARRAY) {
                ContentValues values = buildContentValues(parser);
                cVVector.add(values);
            }
            if (cVVector.size() > 0) {
                ContentValues[] cvArray = new ContentValues[cVVector.size()];
                cVVector.toArray(cvArray);
                // Insert all the data into the database
                rowsInserted = mContext.getContentResolver().bulkInsert(PriceEntry.CONTENT_URI, cvArray);
                Log.v(LOG_TAG, "inserted " + rowsInserted + " rows into prices table");
            }
            break;
        }
    }

    parser.close();
    return retVal;
}