List of usage examples for the java.util.Vector method addAll(Collection)
public boolean addAll(Collection<? extends E> c)
From source file: Main.java
public static void main(String[] args) { Vector<String> v = new Vector<String>(); v.add("1");// w w w . j a v a 2 s.c om v.add("2"); v.add("3"); ArrayList<String> arrayList = new ArrayList<String>(); arrayList.add("4"); arrayList.add("5"); v.addAll(arrayList); for (int i = 0; i < v.size(); i++) { System.out.println(v.get(i)); } }
From source file: Main.java
public static void main(String[] args) { // create two empty Vectors firstvec and secondvec Vector<Integer> firstvec = new Vector<Integer>(4); Vector<Integer> secondvec = new Vector<Integer>(4); // use add() method to add elements in the secondvec vector secondvec.add(5);//from ww w . j a va 2 s . com secondvec.add(6); secondvec.add(7); secondvec.add(8); // use add() method to add elements in the firstvec vector firstvec.add(1); firstvec.add(2); firstvec.add(3); firstvec.add(4); // use addAll() method to add secondvec with firstvec vector firstvec.addAll(secondvec); // let us print all the elements available in vector firstvec vector System.out.println("Added numbers are :- "); System.out.println(firstvec); }
From source file: org.commoncrawl.service.listcrawler.HDFSFileIndex.java
public static void main(String[] args) { try {//from www .ja v a 2 s. co m ByteStream outputStream = new ByteStream(8192); Vector<FingerprintAndOffsetTuple> fpInfo = new Vector<FingerprintAndOffsetTuple>(); // construct 10000 entries with randomin fingerprints for (int i = 0; i < 10000; ++i) { MessageDigest digester; digester = MessageDigest.getInstance("MD5"); long time = System.currentTimeMillis(); digester.update((new UID() + "@" + time + ":" + i).getBytes()); FingerprintAndOffsetTuple offsetInfo = new FingerprintAndOffsetTuple( URLFingerprint.generate64BitURLFPrint(StringUtils.byteToHexString(digester.digest())), i * 10000); fpInfo.add(offsetInfo); } // clone the vector Vector<FingerprintAndOffsetTuple> fpInfoCloned = new Vector<FingerprintAndOffsetTuple>(); fpInfoCloned.addAll(fpInfo); // now write out the index ... writeIndex(fpInfoCloned, new DataOutputStream(outputStream)); // spit out some basic stats System.out.println("output buffer size is:" + outputStream.size()); } catch (Exception e) { CacheManager.LOG.error(CCStringUtils.stringifyException(e)); } }
From source file: Main.java
/**
 * Returns the word itself followed by its positive associations.
 *
 * @param word the word to look up (also included as the first element)
 * @return a Vector containing {@code word} plus any entries found in
 *         {@code posMap}; just {@code word} when the map has no entry
 */
public static Vector<String> getPositiveAssociation(String word) {
    Vector<String> result = new Vector<String>();
    result.add(word);
    // Explicit null check instead of the original empty catch block, which
    // used an NPE from addAll(null) as control flow and silently swallowed
    // every other exception as well.
    if (posMap.get(word) != null) {
        result.addAll(posMap.get(word));
    }
    return result;
}
From source file: Main.java
public static Vector<String> getNegativeAssociation(String word) { Vector<String> result = new Vector<String>(); result.add(word);// w w w.ja va 2 s . c om try { result.addAll(negMap.get(word)); } catch (Exception e) { } return result; }
From source file: com.chinamobile.bcbsp.fault.tools.Zip.java
/**
 * Recursively collects all regular files under a directory.
 *
 * @param sourceFile the source directory (or a single file)
 * @return the files' collection: every non-directory file found beneath
 *         {@code sourceFile}, or {@code sourceFile} itself if it is not a
 *         directory
 */
private static Vector<File> getAllFiles(File sourceFile) {
    Vector<File> fileVector = new Vector<File>();
    if (sourceFile.isDirectory()) {
        File[] files = sourceFile.listFiles();
        // listFiles() returns null on an I/O error or if the directory is
        // removed concurrently; the original code would NPE in that case.
        if (files != null) {
            for (File file : files) {
                fileVector.addAll(getAllFiles(file));
            }
        }
    } else {
        fileVector.add(sourceFile);
    }
    return fileVector;
}
From source file: com.ricemap.spateDB.mapred.FileSplitUtil.java
/**
 * Combines two file splits into a single CombineFileSplit.
 *
 * @param conf the job configuration passed through to the combined split
 * @param split1 first split; stored at index 0 of the combined arrays
 * @param split2 second split; stored at index 1 of the combined arrays
 * @return a CombineFileSplit covering both input splits
 * @throws IOException if reading split metadata fails
 */
public static InputSplit combineFileSplits(JobConf conf, FileSplit split1, FileSplit split2)
        throws IOException {
    FileSplit[] splits = { split1, split2 };
    Path[] paths = new Path[2];
    long[] starts = new long[2];
    long[] lengths = new long[2];
    Vector<String> vlocations = new Vector<String>();

    // Copy each split's path/start/length and gather its host locations.
    for (int i = 0; i < splits.length; i++) {
        paths[i] = splits[i].getPath();
        starts[i] = splits[i].getStart();
        lengths[i] = splits[i].getLength();
        vlocations.addAll(Arrays.asList(splits[i].getLocations()));
    }

    // Rank the gathered hosts (presumably by frequency — see prioritizeLocations).
    String[] locations = prioritizeLocations(vlocations);
    return new CombineFileSplit(conf, paths, starts, lengths, locations);
}
From source file: savant.tableview.SavantRecordModel.java
public static Vector getColumnNamesForTrack(TrackAdapter t) { Vector result = new Vector(); result.add("No."); switch (t.getDataSource().getDataFormat()) { case SEQUENCE_FASTA: result.addAll(Arrays.asList(sequenceColumnNames)); break;// www .j a va 2 s .co m case INTERVAL_BAM: result.addAll(Arrays.asList(bamColumnNames)); break; case INTERVAL_BED: result.addAll(Arrays.asList(bedColumnNames)); break; case INTERVAL_GENERIC: result.addAll(Arrays.asList(intervalColumnNames)); break; case CONTINUOUS_GENERIC: result.addAll(Arrays.asList(continuousColumnNames)); break; case POINT_GENERIC: result.addAll(Arrays.asList(pointColumnNames)); break; case TABIX: String[] tabixColumnNames = null; if (t.getDataInRange().size() > 0) { TabixIntervalRecord r = ((TabixIntervalRecord) t.getDataInRange().get(0)); int numfields = ((TabixIntervalRecord) t.getDataInRange().get(0)).getOtherValues().size(); tabixColumnNames = new String[3 + numfields]; for (int i = 0; i < numfields; i++) { tabixColumnNames[i + 3] = "Field" + (i + 3 + 1); } } else { tabixColumnNames = new String[3]; } tabixColumnNames[0] = "Reference"; tabixColumnNames[1] = "Start"; tabixColumnNames[2] = "End"; result.addAll(Arrays.asList(tabixColumnNames)); break; default: throw new UnsupportedOperationException(t.getDataSource().getDataFormat() + " is not supported"); } return result; }
From source file: savant.tableview.SavantRecordModel.java
/**
 * Builds the table column classes for a track, keyed by its data format.
 * The first column is always the row number (Integer).
 *
 * @param t the track whose data format determines the column classes
 * @return a Vector of Class objects, parallel to the column names
 * @throws UnsupportedOperationException for unrecognized data formats
 */
public static Vector getColumnClassesForTrack(TrackAdapter t) {
    Vector result = new Vector();
    result.add(Integer.class);
    switch (t.getDataSource().getDataFormat()) {
    case SEQUENCE_FASTA:
        result.addAll(Arrays.asList(sequenceColumnClasses));
        break;
    case INTERVAL_BAM:
        result.addAll(Arrays.asList(bamColumnClasses));
        break;
    case INTERVAL_BED:
        result.addAll(Arrays.asList(bedColumnClasses));
        break;
    case INTERVAL_GENERIC:
        result.addAll(Arrays.asList(intervalColumnClasses));
        break;
    case CONTINUOUS_GENERIC:
        result.addAll(Arrays.asList(continuousColumnClasses));
        break;
    case POINT_GENERIC:
        result.addAll(Arrays.asList(pointColumnClasses));
        break;
    case TABIX:
        // Three fixed columns (string, long, long) plus one string-class slot
        // per extra field in the first record (removed an unused local, a
        // stray empty statement, and a duplicated get(0) call).
        Class[] tabixColumnClasses;
        if (t.getDataInRange().size() > 0) {
            int numfields = ((TabixIntervalRecord) t.getDataInRange().get(0)).getOtherValues().size();
            tabixColumnClasses = new Class[3 + numfields];
            for (int i = 0; i < numfields; i++) {
                tabixColumnClasses[i + 3] = sc;
            }
        } else {
            tabixColumnClasses = new Class[3];
        }
        tabixColumnClasses[0] = sc;
        tabixColumnClasses[1] = lc;
        tabixColumnClasses[2] = lc;
        result.addAll(Arrays.asList(tabixColumnClasses));
        break;
    default:
        // Informative message, consistent with getColumnNamesForTrack
        // (the original threw with an empty message).
        throw new UnsupportedOperationException(t.getDataSource().getDataFormat() + " is not supported");
    }
    return result;
}
From source file: com.ricemap.spateDB.mapred.FileSplitUtil.java
/**
 * Combines a number of file splits into one CombineFileSplit. If the number
 * of splits to be combined is one, it returns that split as is without
 * creating a CombineFileSplit.
 *
 * @param conf the job configuration passed through to the combined split
 * @param splits the list of splits to draw from
 * @param startIndex index of the first split to combine
 * @param count how many consecutive splits to combine
 * @return the single split, or a CombineFileSplit covering all of them
 * @throws IOException if reading split metadata fails
 */
public static InputSplit combineFileSplits(JobConf conf, List<FileSplit> splits, int startIndex, int count)
        throws IOException {
    if (count == 1) {
        return splits.get(startIndex);
    }

    Path[] paths = new Path[count];
    long[] starts = new long[count];
    long[] lengths = new long[count];
    Vector<String> vlocations = new Vector<String>();

    // NOTE: the arrays are filled back-to-front (split startIndex lands in
    // slot count-1), matching the original countdown loop exactly, while
    // the locations are accumulated in forward list order.
    for (int i = 0; i < count; i++) {
        FileSplit split = splits.get(startIndex + i);
        int slot = count - 1 - i;
        paths[slot] = split.getPath();
        starts[slot] = split.getStart();
        lengths[slot] = split.getLength();
        vlocations.addAll(Arrays.asList(split.getLocations()));
    }

    String[] locations = prioritizeLocations(vlocations);
    return new CombineFileSplit(conf, paths, starts, lengths, locations);
}