List of usage examples for java.util.Vector.toArray(T[])
@SuppressWarnings("unchecked") public synchronized <T> T[] toArray(T[] a)
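Per the Javadoc contract, if the vector fits in the supplied array it is filled and returned directly (with the element immediately following the end set to null if the array is longer); otherwise a new array of the same runtime type is allocated and returned. Before the examples, here is a minimal sketch of the two common calling idioms; the class and variable names (ToArrayDemo, words) are illustrative only and do not come from the source files below.

import java.util.Vector;

public class ToArrayDemo {
    public static void main(String[] args) {
        Vector<String> words = new Vector<String>();
        words.add("alpha");
        words.add("beta");

        // Idiom 1: pass a presized array; it is filled and returned directly.
        String[] sized = words.toArray(new String[words.size()]);

        // Idiom 2: pass a zero-length array; toArray allocates a new array
        // of the right runtime type and length for us.
        String[] grown = words.toArray(new String[0]);

        System.out.println(sized.length + " " + grown.length); // prints: 2 2
    }
}

Both idioms appear in the examples that follow.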
From source file:co.carlosjimenez.android.currencyalerts.app.sync.ForexSyncAdapter.java
/**
 * Take the String representing the complete forex in JSON format and
 * pull out the data we need to construct the Strings needed for the wireframes.
 *
 * Fortunately parsing is easy: the constructor takes the JSON string and converts it
 * into an Object hierarchy for us.
 */
private void getForexDataFromJson(String forexJsonStr, String currencyQuery) throws JSONException {

    // These are the names of the JSON objects that need to be extracted.
    final String OWM_RESULT = "results";
    final String OWM_RATE_FROM = "fr";
    final String OWM_RATE_TO = "to";
    final String OWM_RATE = "val";

    String[] currencies;
    boolean alertAvailable = false;

    try {
        if (mAlertData != null && mAlertData.getCurrencyFrom() != null
                && mAlertData.getCurrencyTo() != null) {
            alertAvailable = true;
        }

        JSONObject forexJson = new JSONObject(forexJsonStr);

        // Do we have an error?
        if (!forexJson.has(OWM_RESULT)) {
            setForexStatus(getContext(), FOREX_STATUS_INVALID);
            return;
        }

        currencies = currencyQuery.split(",");
        JSONObject forexResultObject = forexJson.getJSONObject(OWM_RESULT);

        // OWM returns daily rates based upon the local time of the city that is being
        // asked for, which means that we need to know the GMT offset to translate this
        // data properly. Since this data is also sent in-order and the first day is
        // always the current day, we're going to take advantage of that to get a nice
        // normalized UTC date for all of our rates.
        Time dayTime = new Time();
        dayTime.setToNow();

        // We start at the day returned by local time. Otherwise this is a mess.
        int julianDate = Time.getJulianDay(System.currentTimeMillis(), dayTime.gmtoff);

        // Now we work exclusively in UTC.
        dayTime = new Time();
        long dateTime = dayTime.setJulianDay(julianDate);

        // Insert the new rates information into the database.
        Vector<ContentValues> cVVector = new Vector<>(currencies.length);

        for (int i = 0; i < currencies.length; i++) {
            JSONObject currencyObject = forexResultObject.getJSONObject(currencies[i]);
            String rate_from = currencyObject.getString(OWM_RATE_FROM);
            String rate_to = currencyObject.getString(OWM_RATE_TO);
            double result = currencyObject.getDouble(OWM_RATE);

            ContentValues forexValues = new ContentValues();
            forexValues.put(ForexContract.RateEntry.COLUMN_RATE_FROM_KEY, rate_from);
            forexValues.put(ForexContract.RateEntry.COLUMN_RATE_TO_KEY, rate_to);
            forexValues.put(ForexContract.RateEntry.COLUMN_RATE_DATE, dateTime);
            forexValues.put(ForexContract.RateEntry.COLUMN_RATE_VALUE, result);

            if (alertAvailable && mAlertData.getCurrencyFrom().getId().equals(rate_from)
                    && mAlertData.getCurrencyTo().getId().equals(rate_to)) {
                mCurrentAlertRate = result;
            }

            cVVector.add(forexValues);
        }

        // Add to database.
        if (cVVector.size() > 0) {
            ContentValues[] cvArray = new ContentValues[cVVector.size()];
            cVVector.toArray(cvArray);
            getContext().getContentResolver().bulkInsert(ForexContract.RateEntry.CONTENT_URI, cvArray);

            // Delete old data so we don't build up an endless history.
            getContext().getContentResolver().delete(ForexContract.RateEntry.CONTENT_URI,
                    ForexContract.RateEntry.COLUMN_RATE_DATE + " <= ?",
                    new String[] { Long.toString(dayTime.setJulianDay(julianDate - FOREX_DAYS_TO_KEEP)) });

            setForexSyncDate(getContext(), System.currentTimeMillis());
            sendSyncBroadcast(FOREX_STATUS_OK);
            checkCurrencyData();
        }

        Log.d(LOG_TAG, "ForexSyncAdapter: Sync Complete. " + cVVector.size() + " Inserted");
        setForexStatus(getContext(), FOREX_STATUS_OK);
    } catch (JSONException e) {
        Log.e(LOG_TAG, e.getMessage(), e);
        setForexStatus(getContext(), FOREX_STATUS_SERVER_INVALID);
    }
}
From source file:org.sakaiproject.component.app.syllabus.SyllabusServiceImpl.java
/**
 * From StringUtil.java
 */
protected String[] split(String source, String splitter) {
    // Hold the results as we find them.
    Vector rv = new Vector();
    int last = 0;
    int next = 0;
    do {
        // Find the next splitter in the source.
        next = source.indexOf(splitter, last);
        if (next != -1) {
            // Isolate from last up to (but not including) next.
            rv.add(source.substring(last, next));
            last = next + splitter.length();
        }
    } while (next != -1);
    if (last < source.length()) {
        rv.add(source.substring(last, source.length()));
    }
    // Convert to array.
    return (String[]) rv.toArray(new String[rv.size()]);
}
From source file:org.jopac2.jbal.iso2709.Unimarc.java
@Override
public ElectronicResource[] getElectronicVersion() {
    Vector<ElectronicResource> v = new Vector<ElectronicResource>();
    Vector<Tag> resources = getTags("856");
    for (Tag resource : resources) {
        ElectronicResource el = convertElectronicResource(resource);
        v.addElement(el);
    }
    return v.toArray(new ElectronicResource[v.size()]);
}
From source file:de.juwimm.cms.remote.UserServiceSpringImpl.java
/**
 * Returns all users regarding the active site and the given unit.<br>
 * If the group is <b>null</b>, it returns all users for this unit.<br>
 * PLEASE NOTE: DOES NOT RETURN SITEROOTS!
 * <b>SECURITY INFORMATION:</b> Available only to: <i>siteRoot, unitAdmin</i>
 *
 * @param unitId
 *            the unit
 * @return all UserValue objects in an array; empty if nobody was found
 *
 * @see de.juwimm.cms.remote.UserServiceSpring#getAllUser4Unit(java.lang.Integer)
 */
@Override
protected UserValue[] handleGetAllUser4Unit(Integer unitId) throws Exception {
    if (unitId == null) {
        return new UserValue[0];
    }
    Vector<UserValue> userValues = new Vector<UserValue>();
    try {
        UserHbm userMe = super.getUserHbmDao().load(AuthenticationHelper.getUserName());
        if (getUserHbmDao().isInUnit(unitId, userMe)) {
            UnitHbm unit = super.getUnitHbmDao().load(unitId);
            userValues.addAll(this.getUsers4Unit(userMe, unit));
        }
    } catch (Exception e) {
        throw new UserException(e.getMessage());
    }
    return userValues.toArray(new UserValue[0]);
}
From source file:gda.scan.ScanDataPoint.java
/**
 * Just returns an array of detector data.
 *
 * @return all detector data.
 */
@Override
public Double[] getDetectorDataAsDoubles() {
    Vector<Double> vals = new Vector<Double>();
    if (getDetectorData() != null) {
        for (Object data : getDetectorData()) {
            PlottableDetectorData wrapper = (data instanceof PlottableDetectorData)
                    ? (PlottableDetectorData) data
                    : new DetectorDataWrapper(data);
            Double[] dvals = wrapper.getDoubleVals();
            vals.addAll(Arrays.asList(dvals));
        }
    }
    int expectedSize = getDetectorHeader().size();
    int actualSize = vals.size();
    if (actualSize != expectedSize) {
        throw new IllegalArgumentException("Detector data does not hold the expected number of fields actual:"
                + actualSize + " expected:" + expectedSize);
    }
    return vals.toArray(new Double[] {});
}
From source file:uk.ac.babraham.SeqMonk.Pipelines.CodonBiasPipeline.java
protected void startPipeline() {

    // We first need to generate probes over all of the features listed in
    // the feature types. The probes should cover the whole area of the
    // feature regardless of where it splices.
    Vector<Probe> probes = new Vector<Probe>();
    double pValue = optionsPanel.pValue();
    String libraryType = optionsPanel.libraryType();

    Chromosome[] chrs = collection().genome().getAllChromosomes();

    for (int c = 0; c < chrs.length; c++) {
        if (cancel) {
            progressCancelled();
            return;
        }
        progressUpdated("Making probes for chr" + chrs[c].name(), c, chrs.length * 2);

        Feature[] features = collection().genome().annotationCollection().getFeaturesForType(chrs[c],
                optionsPanel.getSelectedFeatureType());

        for (int f = 0; f < features.length; f++) {
            if (cancel) {
                progressCancelled();
                return;
            }
            Probe p = new Probe(chrs[c], features[f].location().start(), features[f].location().end(),
                    features[f].location().strand(), features[f].name());
            probes.add(p);
        }
    }

    allProbes = probes.toArray(new Probe[0]);

    collection().setProbeSet(new ProbeSet("Features over " + optionsPanel.getSelectedFeatureType(), allProbes));

    // Now we can quantitate each individual feature and test for whether it is
    // significantly showing codon bias.
    ArrayList<Vector<ProbeTTestValue>> significantProbes = new ArrayList<Vector<ProbeTTestValue>>();

    // data contains the data stores that this pipeline is going to use.
    // We need to test each data store.
    for (int d = 0; d < data.length; d++) {
        significantProbes.add(new Vector<ProbeTTestValue>());
    }

    int probeCounter = 0;

    for (int c = 0; c < chrs.length; c++) {
        if (cancel) {
            progressCancelled();
            return;
        }
        progressUpdated("Quantitating features on chr" + chrs[c].name(), chrs.length + c, chrs.length * 2);

        Feature[] features = collection().genome().annotationCollection().getFeaturesForType(chrs[c],
                optionsPanel.getSelectedFeatureType());

        for (int p = 0; p < features.length; p++) {

            // Get the corresponding feature and work out the mapping between
            // genomic position and codon sub position.
            int[] mappingArray = createGenomeMappingArray(features[p]);

            DATASTORE_LOOP: for (int d = 0; d < data.length; d++) {

                if (cancel) {
                    progressCancelled();
                    return;
                }

                long[] reads = data[d].getReadsForProbe(allProbes[probeCounter]);

                // TODO: make this configurable
                if (reads.length < 5) {
                    data[d].setValueForProbe(allProbes[probeCounter], Float.NaN);
                    continue DATASTORE_LOOP;
                }

                int pos1Count = 0;
                int pos2Count = 0;
                int pos3Count = 0;

                READ_LOOP: for (int r = 0; r < reads.length; r++) {

                    int genomicReadStart = SequenceRead.start(reads[r]);
                    int genomicReadEnd = SequenceRead.end(reads[r]);
                    int readStrand = SequenceRead.strand(reads[r]);
                    int relativeReadStart = -1;

                    // Work out where the start of the read is relative to the feature,
                    // depending on strandedness of read, probe and library type.

                    // Forward reads
                    if (readStrand == 1) {
                        if (libraryType.equals("Same strand specific")) {
                            if (features[p].location().strand() == Location.FORWARD) {
                                // The start of the read needs to be within the feature.
                                if (genomicReadStart - features[p].location().start() < 0) {
                                    continue READ_LOOP;
                                } else {
                                    // Look up the read start pos in the mapping array.
                                    relativeReadStart = mappingArray[genomicReadStart
                                            - features[p].location().start()];
                                }
                            }
                        } else if (libraryType.equals("Opposing strand specific")) {
                            if (features[p].location().strand() == Location.REVERSE) {
                                // The start of the read needs to be within the feature.
                                // The "start" of a reverse read/probe is actually the end.
                                if (features[p].location().end() - genomicReadEnd < 0) {
                                    continue READ_LOOP;
                                } else {
                                    relativeReadStart = mappingArray[features[p].location().end()
                                            - genomicReadEnd];
                                }
                            }
                        }
                    }

                    // Reverse reads
                    if (readStrand == -1) {
                        if (libraryType.equals("Same strand specific")) {
                            if (features[p].location().strand() == Location.REVERSE) {
                                if (features[p].location().end() - genomicReadEnd < 0) {
                                    continue READ_LOOP;
                                } else {
                                    relativeReadStart = mappingArray[features[p].location().end()
                                            - genomicReadEnd];
                                }
                            }
                        } else if (libraryType.equals("Opposing strand specific")) {
                            if (features[p].location().strand() == Location.FORWARD) {
                                // The start of the read needs to be within the feature.
                                if (genomicReadStart - features[p].location().start() < 0) {
                                    continue READ_LOOP;
                                } else {
                                    // Look up the read start position in the mapping array.
                                    relativeReadStart = mappingArray[genomicReadStart
                                            - features[p].location().start()];
                                }
                            }
                        }
                    }

                    // Find out which codon sub position the read is in.
                    if (relativeReadStart == -1) {
                        continue READ_LOOP;
                    } else if (relativeReadStart % 3 == 0) {
                        pos3Count++;
                        continue READ_LOOP;
                    } else if ((relativeReadStart + 1) % 3 == 0) {
                        pos2Count++;
                        continue READ_LOOP;
                    } else if ((relativeReadStart + 2) % 3 == 0) {
                        pos1Count++;
                    }
                } // end of read loop

                int interestingCodonCount = 0;
                int otherCodonCount = 0;

                if (optionsPanel.codonSubPosition() == 1) {
                    interestingCodonCount = pos1Count;
                    otherCodonCount = pos2Count + pos3Count;
                } else if (optionsPanel.codonSubPosition() == 2) {
                    interestingCodonCount = pos2Count;
                    otherCodonCount = pos1Count + pos3Count;
                } else if (optionsPanel.codonSubPosition() == 3) {
                    interestingCodonCount = pos3Count;
                    otherCodonCount = pos1Count + pos2Count;
                }

                int totalCount = interestingCodonCount + otherCodonCount;

                BinomialDistribution bd = new BinomialDistribution(totalCount, 1 / 3d);

                // Since the binomial distribution gives the probability of getting a value
                // higher than this we need to subtract one so we get the probability of
                // this or higher.
                double thisPValue = 1 - bd.cumulativeProbability(interestingCodonCount - 1);

                if (interestingCodonCount == 0)
                    thisPValue = 1;

                // We have to add all results at this stage so we don't mess up the
                // multiple testing correction later on.
                significantProbes.get(d).add(new ProbeTTestValue(allProbes[probeCounter], thisPValue));

                float percentageCount;
                if (totalCount == 0) {
                    percentageCount = 0;
                } else {
                    percentageCount = ((float) interestingCodonCount / (float) totalCount) * 100;
                }

                data[d].setValueForProbe(allProbes[probeCounter], percentageCount);
            }
            probeCounter++;
        }
    }

    // Now we can go through the set of significant probes, applying a correction
    // and then filtering those which pass our p-value cutoff.
    for (int d = 0; d < data.length; d++) {

        ProbeTTestValue[] ttestResults = significantProbes.get(d).toArray(new ProbeTTestValue[0]);

        BenjHochFDR.calculateQValues(ttestResults);

        ProbeList newList = new ProbeList(collection().probeSet(),
                "Codon bias < " + pValue + " in " + data[d].name(),
                "Probes showing significant codon bias for position " + optionsPanel.codonSubPosition()
                        + " with a cutoff of " + pValue,
                "FDR");

        for (int i = 0; i < ttestResults.length; i++) {
            if (ttestResults[i].q < pValue) {
                newList.addProbe(ttestResults[i].probe, (float) ttestResults[i].q);
            }
        }
    }

    StringBuffer quantitationDescription = new StringBuffer();
    quantitationDescription.append("Codon bias pipeline using codon position "
            + optionsPanel.codonSubPosition() + " for " + optionsPanel.libraryType() + " library.");

    collection().probeSet().setCurrentQuantitation(quantitationDescription.toString());

    quantitatonComplete();
}
From source file:com.sos.VirtualFileSystem.FTP.SOSVfsFtpBaseClass.java
/**
 * Return a listing of the files in a directory in long format on
 * the remote machine.
 *
 * @param pathname on remote machine
 * @return a listing of the contents of a directory on the remote machine
 * @exception Exception
 * @see #nList()
 * @see #nList(String)
 * @see #dir()
 */
public SOSFileList dir(final String pathname) {
    Vector<String> strList = getFilenames(pathname);
    String[] strT = strList.toArray(new String[strList.size()]);
    SOSFileList objFileList = new SOSFileList(strT);
    return objFileList;
}
From source file:org.sakaiproject.component.app.messageforums.DiscussionForumServiceImpl.java
protected String[] split(String source, String splitter) {
    // Hold the results as we find them.
    Vector rv = new Vector();
    int last = 0;
    int next = 0;
    do {
        // Find the next splitter in the source.
        next = source.indexOf(splitter, last);
        if (next != -1) {
            // Isolate from last up to (but not including) next.
            rv.add(source.substring(last, next));
            last = next + splitter.length();
        }
    } while (next != -1);
    if (last < source.length()) {
        rv.add(source.substring(last, source.length()));
    }
    // Convert to array.
    return (String[]) rv.toArray(new String[rv.size()]);
}
From source file:tech.salroid.filmy.parser.MainActivityParseWork.java
public void parseupcoming() {
    try {
        JSONObject jsonObject = new JSONObject(result);
        JSONArray jsonArray = jsonObject.getJSONArray("results");
        Vector<ContentValues> cVVector = new Vector<ContentValues>(jsonArray.length());

        for (int i = 0; i < jsonArray.length(); i++) {
            String title, poster, id;
            title = (jsonArray.getJSONObject(i)).getString("title");
            poster = (jsonArray.getJSONObject(i).getString("poster_path"));
            id = (jsonArray.getJSONObject(i)).getString("id");

            String temp_year[] = (jsonArray.getJSONObject(i)).getString("release_date").split("-");
            String year = temp_year[0];

            String trimmedQuery = (title.toLowerCase()).trim();
            String finalQuery = trimmedQuery.replace(" ", "-");
            finalQuery = finalQuery.replace("'", "-");
            String slug = (finalQuery.replace(":", "")) + "-" + year;

            // Insert the new movie information into the database.
            ContentValues movieValues = new ContentValues();

            if (!(poster.equals("null"))) {
                movieValues.put(FilmContract.MoviesEntry.MOVIE_ID, id);
                movieValues.put(FilmContract.MoviesEntry.MOVIE_TITLE, title);
                movieValues.put(FilmContract.MoviesEntry.MOVIE_YEAR, year);
                movieValues.put(FilmContract.MoviesEntry.MOVIE_POSTER_LINK,
                        "http://image.tmdb.org/t/p/w185" + poster);
                cVVector.add(movieValues);
            }
        }

        int inserted = 0;
        if (cVVector.size() > 0) {
            ContentValues[] cvArray = new ContentValues[cVVector.size()];
            cVVector.toArray(cvArray);
            context.getContentResolver().delete(FilmContract.UpComingMoviesEntry.CONTENT_URI, null, null);
            inserted = context.getContentResolver().bulkInsert(FilmContract.UpComingMoviesEntry.CONTENT_URI,
                    cvArray);
        }
    } catch (JSONException e1) {
        e1.printStackTrace();
    }
}
From source file:tech.salroid.filmy.parser.MainActivityParseWork.java
public void intheatres() {
    try {
        JSONObject jsonObject = new JSONObject(result);
        JSONArray jsonArray = jsonObject.getJSONArray("results");
        Vector<ContentValues> cVVector = new Vector<ContentValues>(jsonArray.length());

        for (int i = 0; i < jsonArray.length(); i++) {
            String title, poster, id;
            title = (jsonArray.getJSONObject(i)).getString("title");
            poster = (jsonArray.getJSONObject(i).getString("poster_path"));
            id = (jsonArray.getJSONObject(i)).getString("id");

            String temp_year[] = (jsonArray.getJSONObject(i)).getString("release_date").split("-");
            String year = temp_year[0];

            String trimmedQuery = (title.toLowerCase()).trim();
            String finalQuery = trimmedQuery.replace(" ", "-");
            finalQuery = finalQuery.replace("'", "-");
            String slug = (finalQuery.replace(":", "")) + "-" + year;

            if (!(poster.equals("null"))) {
                ContentValues movieValues = new ContentValues();
                movieValues.put(FilmContract.MoviesEntry.MOVIE_ID, id);
                movieValues.put(FilmContract.MoviesEntry.MOVIE_TITLE, title);
                movieValues.put(FilmContract.MoviesEntry.MOVIE_YEAR, year);
                movieValues.put(FilmContract.MoviesEntry.MOVIE_POSTER_LINK,
                        "http://image.tmdb.org/t/p/w185" + poster);
                cVVector.add(movieValues);
            }
        }

        int inserted = 0;
        // Add to database.
        if (cVVector.size() > 0) {
            ContentValues[] cvArray = new ContentValues[cVVector.size()];
            cVVector.toArray(cvArray);
            context.getContentResolver().delete(FilmContract.InTheatersMoviesEntry.CONTENT_URI, null, null);
            inserted = context.getContentResolver().bulkInsert(FilmContract.InTheatersMoviesEntry.CONTENT_URI,
                    cvArray);
        }
    } catch (JSONException e1) {
        e1.printStackTrace();
    }
}