List of usage examples for java.util.ArrayList.clear()
public void clear()
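Removes all of the elements from the list; the list is empty after this call returns. Note that clear() keeps the backing array allocated, which is why several of the examples below call it to reuse a buffer between batches rather than allocating a new list. A minimal self-contained sketch of the behavior (class and variable names here are illustrative, not taken from the examples):

import java.util.ArrayList;

public class ClearDemo {
    public static void main(String[] args) {
        ArrayList<String> buffer = new ArrayList<>();
        buffer.add("a");
        buffer.add("b");
        System.out.println(buffer.size());    // 2
        buffer.clear();                       // removes all elements, keeps capacity
        System.out.println(buffer.size());    // 0
        System.out.println(buffer.isEmpty()); // true
        buffer.add("c");                      // the list is immediately reusable
        System.out.println(buffer);           // [c]
    }
}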
From source file: org.apache.hadoop.hive.ql.io.RCFileInputFormatSplitByLineNum.java

public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
    ArrayList<CombineRCFileFileSplit_WithLineNum> splits =
            new ArrayList<CombineRCFileFileSplit_WithLineNum>(numSplits);
    int lenNum = job.getInt("hive.inputfiles.line_num_per_split", 1000000);
    if (lenNum < 10000) {
        LOG.info("lenNum " + lenNum + " is too small, so it is set to 1000000");
        lenNum = 1000000;
    }
    FileStatus[] fss = listStatus(job);
    FileStatus[] originalFss = fss;
    List<FileStatus> fssList = new ArrayList<FileStatus>();
    for (int i = 0; i < fss.length; i++) {
        if (fss[i].getLen() > 0) {
            fssList.add(fss[i]);
        }
    }
    fss = (FileStatus[]) fssList.toArray(new FileStatus[0]);
    int listSize = fss.length;
    if (listSize == 0) {
        mapredWork mrWork = Utilities.getMapRedWork(job);
        Path inputPath = originalFss[0].getPath();
        Path inputParentPath = inputPath.getParent();
        String inputPathStr = inputPath.toUri().toString();
        String inputPathParentStr = inputParentPath.toString();
        FileSystem fs = inputPath.getFileSystem(job);
        fs.delete(inputPath, true);
        LinkedHashMap<String, partitionDesc> partDescMap = mrWork.getPathToPartitionInfo();
        partitionDesc partDesc = partDescMap.get(inputPathParentStr);
        job.setBoolean("NeedPostfix", false);
        RecordWriter recWriter = new RCFileOutputFormat().getHiveRecordWriter(job, inputPath,
                Text.class, false, partDesc.getTableDesc().getProperties(), null);
        recWriter.close(false);
        job.setBoolean("NeedPostfix", true);
        fss = listStatus(job);
    }
    // Shuffle the files with a fixed seed so splits are spread across files deterministically.
    Random r = new Random(123456);
    for (int i = 0; i < fss.length; i++) {
        int x = r.nextInt(fss.length);
        FileStatus tmp = fss[i];
        fss[i] = fss[x];
        fss[x] = tmp;
    }
    long fileRecordNum = 0;
    long fileLen = 0;
    for (int i = 0; i < fss.length; i++) {
        String fileName = fss[i].getPath().toString();
        if (fileName.endsWith(".rcf")) {
            // The record count is encoded in the file name after the last '_'.
            int index = fileName.lastIndexOf("_");
            String sub = fileName.substring(index + 1, fileName.length() - 4);
            fileRecordNum += Long.valueOf(sub);
            fileLen += fss[i].getLen();
        } else {
            // throw new IOException("file:" + fileName + " is not rcfile.");
        }
    }
    long minBlockSize = job.getInt("hive.io.rcfile.record.buffer.size", 4 * 1024 * 1024) * 2;
    long splitLen = 0;
    if (fileRecordNum > 0) {
        splitLen = (fileLen / fileRecordNum) * lenNum;
    }
    long splitSize = Math.max(splitLen, minBlockSize);
    LOG.info("fileRecordNum=" + fileRecordNum + ",fileLen=" + fileLen + ",splitSize=" + splitSize);
    int id = 0;
    int preId = 0;
    long offset = 0;
    long currlen = 0;
    ArrayList<RCFileFileSplit_WithLineNum> currFileSplits = new ArrayList<RCFileFileSplit_WithLineNum>();
    while (true) {
        long need = splitSize - currlen;
        long remain = fss[id].getLen() - offset;
        if (need <= remain) {
            if (preId != id && need < minBlockSize) {
                // Skip: do not start a map over a file tail smaller than minBlockSize.
            } else {
                currFileSplits.add(new RCFileFileSplit_WithLineNum(fss[id].getPath(), offset, need, (String[]) null));
                offset += need;
            }
            splits.add(new CombineRCFileFileSplit_WithLineNum(
                    currFileSplits.toArray(new RCFileFileSplit_WithLineNum[currFileSplits.size()]),
                    fss[id].getPath().getFileSystem(job)
                            .getFileBlockLocations(fss[id], 0, fss[id].getLen())[0].getHosts()));
            // Reuse the accumulator list for the next combined split.
            currFileSplits.clear();
            currlen = 0;
        } else {
            if (remain != 0) {
                currFileSplits.add(new RCFileFileSplit_WithLineNum(fss[id].getPath(), offset, remain, (String[]) null));
            }
            preId = id;
            id++;
            offset = 0;
            currlen += remain;
        }
        if (id == fss.length) {
            if (currFileSplits.size() != 0) {
                splits.add(new CombineRCFileFileSplit_WithLineNum(
                        currFileSplits.toArray(new RCFileFileSplit_WithLineNum[currFileSplits.size()]),
                        fss[id - 1].getPath().getFileSystem(job)
                                .getFileBlockLocations(fss[id - 1], 0, fss[id - 1].getLen())[0].getHosts()));
            }
            break;
        }
    }
    // add by payniexiao in 20130115 for resolve split by line bug
    if (splits.size() == 0) {
        ArrayList<RCFileFileSplit_WithLineNum> emptyFileSplits = new ArrayList<RCFileFileSplit_WithLineNum>();
        emptyFileSplits.add(new RCFileFileSplit_WithLineNum(fss[0].getPath(), 0, 0, (String[]) null));
        splits.add(new CombineRCFileFileSplit_WithLineNum(
                emptyFileSplits.toArray(new RCFileFileSplit_WithLineNum[emptyFileSplits.size()]),
                fss[0].getPath().getFileSystem(job)
                        .getFileBlockLocations(fss[0], 0, fss[0].getLen())[0].getHosts()));
    }
    // add end
    for (int i = 0; i < splits.size(); i++) {
        LOG.info(splits.get(i).toString());
    }
    LOG.info("Total # of splits: " + splits.size());
    return splits.toArray(new CombineRCFileFileSplit_WithLineNum[splits.size()]);
}
From source file: userinterface.properties.GUIGraphPicker.java

/**
 * Creates new form GUIGraphPicker
 * @wbp.parser.constructor
 * @param parent The parent.
 * @param plugin The GUIPlugin (GUIMultiProperties)
 * @param experiment The experiment for which to plot a graph.
 * @param graphHandler The graph handler in which to display the graph.
 * @param resultsKnown If true, simply plot existing results (experiment has been done).
 *        If false, attach listeners to the results such that the plot is made when results become available.
 */
public GUIGraphPicker(GUIPrism parent, GUIPlugin plugin, GUIExperiment experiment,
        GUIGraphHandler graphHandler, boolean resultsKnown) {
    super(parent, true);
    setTitle("New Graph Series");
    this.gui = parent;
    this.plugin = plugin;
    this.experiment = experiment;
    this.graphHandler = graphHandler;
    this.resultsCollection = experiment.getResults();
    // graphCancelled will be set explicitly to false when the OK button is pressed
    // (this means if the user closes the dialog, this counts as a cancel)
    this.graphCancelled = true;
    this.multiSeries = new Vector<DefinedConstant>();
    initComponents();
    setResizable(false);
    init();
    setLocationRelativeTo(getParent()); // centre
    getRootPane().setDefaultButton(lineOkayButton);
    /* Wait until OK or Cancel is pressed. */
    setVisible(true);
    /* If OK was pressed. */
    if (!graphCancelled && this.plotType2d.isSelected()) {
        /* Collect series keys. */
        Vector<SeriesKey> seriesKeys = new Vector<SeriesKey>();
        /* Collect series values. */
        ArrayList<Values> seriesValues = new ArrayList<Values>();
        /* Add single constant values to each series. */
        seriesValues.add(otherValues);
        for (int i = 0; i < multiSeries.size(); i++) {
            ArrayList<Values> temp = (ArrayList<Values>) seriesValues.clone();
            // Rebuild the list from scratch for each ranging constant.
            seriesValues.clear();
            // For each of the possible values in the range
            for (int j = 0; j < multiSeries.get(i).getNumSteps(); j++) {
                // Clone the list
                ArrayList copy = (ArrayList<Values>) temp.clone();
                // For each element in the list
                for (int k = 0; k < copy.size(); k++) {
                    Values v = new Values();
                    Values cp = (Values) copy.get(k);
                    v.addValues(cp);
                    v.addValue(multiSeries.get(i).getName(), multiSeries.get(i).getValue(j));
                    seriesValues.add(v);
                }
            }
        }
        /* Do all series settings. */
        for (int serie = 0; serie < seriesValues.size(); serie++) { // each combination of series
            Values values = seriesValues.get(serie);
            String seriesName = (seriesValues.size() > 1) ? values.toString() : seriesNameField.getText();
            // For properties that return an interval, we add a pair of series
            // (the pair is stored as a linked list)
            if (experiment.getPropertyType() instanceof TypeInterval) {
                SeriesKey key = graphModel2D.addSeries(seriesName + " (min)");
                key.next = graphModel2D.addSeries(seriesName + " (max)");
                seriesKeys.add(key);
            } else {
                seriesKeys.add(graphModel2D.addSeries(seriesName));
            }
        }
        /* If there are results already, then let's render them! */
        if (resultsKnown && resultsCollection.getCurrentIteration() > 0) {
            for (int series = 0; series < seriesValues.size(); series++) { // each combination of series
                Values values = seriesValues.get(series);
                SeriesKey seriesKey = seriesKeys.get(series);
                /* Range over x-axis. */
                for (int i = 0; i < rangingConstantX.getNumSteps(); i++) {
                    Object value = rangingConstantX.getValue(i);
                    /* Values used in the one experiment for this series. */
                    Values useThis = new Values();
                    useThis.addValues(values);
                    useThis.addValue(rangerX, value);
                    /* Get this particular result. */
                    try {
                        Object result = resultsCollection.getResult(useThis);
                        double x = 0, y = 0;
                        boolean validX = true;
                        if (value instanceof Double) {
                            x = ((Double) value).doubleValue();
                        } else if (value instanceof Integer) {
                            x = ((Integer) value).intValue();
                        } else {
                            validX = false;
                        }
                        // Add point to graph (if of valid type)
                        if (validX) {
                            if (result instanceof Double) {
                                y = ((Double) result).doubleValue();
                                graphModel2D.addPointToSeries(seriesKey, new PrismXYDataItem(x, y));
                            } else if (result instanceof Integer) {
                                y = ((Integer) result).intValue();
                                graphModel2D.addPointToSeries(seriesKey, new PrismXYDataItem(x, y));
                            } else if (result instanceof Interval) {
                                Interval interval = (Interval) result;
                                if (interval.lower instanceof Double) {
                                    y = ((Double) interval.lower).doubleValue();
                                    graphModel2D.addPointToSeries(seriesKey, new PrismXYDataItem(x, y));
                                    y = ((Double) interval.upper).doubleValue();
                                    graphModel2D.addPointToSeries(seriesKey.next, new PrismXYDataItem(x, y));
                                } else if (result instanceof Integer) {
                                    y = ((Integer) interval.lower).intValue();
                                    graphModel2D.addPointToSeries(seriesKey, new PrismXYDataItem(x, y));
                                    y = ((Integer) interval.upper).intValue();
                                    graphModel2D.addPointToSeries(seriesKey.next, new PrismXYDataItem(x, y));
                                }
                            }
                        }
                    } catch (PrismException pe) {
                        // No result found.
                    }
                }
            }
        } else if (!resultsKnown && resultsCollection.getCurrentIteration() == 0) {
            for (int series = 0; series < seriesValues.size(); series++) { // each combination of series
                Values values = seriesValues.get(series);
                SeriesKey seriesKey = seriesKeys.get(series);
                GraphResultListener listener = new GraphResultListener(graphModel2D, seriesKey, rangerX, values);
                resultsCollection.addResultListener(listener);
            }
        }
    } else if (!graphCancelled && this.plotType3d.isSelected()) {
        graphModel3D.setAxisLabels(selectAxisConstantCombo.getSelectedItem().toString(),
                selectYaxisConstantCombo.getSelectedItem().toString(), "Result");
        GraphResultListener3D listener = new GraphResultListener3D(graphModel3D,
                rangingConstantX, rangingConstantY, seriesNameField.getText());
        resultsCollection.addResultListener(listener);
    }
}
From source file: com.hygenics.parser.KVParser.java

public void run() {
    log.info("Starting Parse @ " + Calendar.getInstance().getTime().toString());
    ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors() * procs);
    Set<Callable<ArrayList<String>>> collection;
    List<Future<ArrayList<String>>> futures;
    ArrayList<String> data = new ArrayList<String>((commitsize + 10));
    ArrayList<String> outdata = new ArrayList<String>(((commitsize + 10) * 3));
    int currpos = 0;
    boolean run = true;
    while (run) {
        log.info("Getting Pages");
        // get pages
        String query = select;
        // reuse the page buffer between iterations
        if (data.size() > 0) {
            data.clear();
        }
        if (extracondition != null) {
            query += " WHERE " + extracondition + " AND ";
        } else {
            query += " WHERE ";
        }
        collection = new HashSet<Callable<ArrayList<String>>>(qnums);
        for (int i = 0; i < qnums; i++) {
            if (currpos + (Math.round(commitsize / qnums * (i + 1))) < currpos + commitsize) {
                collection.add(new SplitQuery((query + pullid + " >= "
                        + Integer.toString(currpos + (Math.round(commitsize / qnums * (i)))) + " AND "
                        + pullid + " < "
                        + Integer.toString(currpos + (Math.round(commitsize / qnums * (i + 1)))))));
            } else {
                collection.add(new SplitQuery((query + pullid + " >= "
                        + Integer.toString(currpos + (Math.round(commitsize / qnums * (i)))) + " AND "
                        + pullid + " < " + Integer.toString(currpos + commitsize))));
            }
        }
        currpos += commitsize;
        if (collection.size() > 0) {
            futures = fjp.invokeAll(collection);
            int w = 0;
            // busy-wait until the pool has gone quiet
            while (fjp.isQuiescent() == false && fjp.getActiveThreadCount() > 0) {
                w++;
            }
            for (Future<ArrayList<String>> f : futures) {
                try {
                    ArrayList<String> darr = f.get();
                    if (darr != null && darr.size() > 0) {
                        data.addAll(darr);
                    }
                } catch (NullPointerException e) {
                    log.info("Some Data Returned Null");
                } catch (InterruptedException e) {
                    e.printStackTrace();
                } catch (ExecutionException e) {
                    e.printStackTrace();
                }
            }
        }
        if (data.size() == 0 && checkString != null) {
            collection = new HashSet<Callable<ArrayList<String>>>(1);
            collection.add(new SplitQuery(checkString));
            futures = fjp.invokeAll(collection);
            int w = 0;
            while (fjp.isQuiescent() == false && fjp.getActiveThreadCount() > 0) {
                w++;
            }
            for (Future<ArrayList<String>> f : futures) {
                try {
                    ArrayList<String> arr = f.get();
                    if (arr != null) {
                        for (String a : arr) {
                            if (a != null) {
                                data.add(a);
                            }
                        }
                    }
                    if (!f.isDone()) {
                        f.cancel(true);
                    }
                    f = null;
                } catch (NullPointerException e) {
                    log.info("Some Data Returned Null");
                } catch (InterruptedException e) {
                    e.printStackTrace();
                } catch (ExecutionException e) {
                    e.printStackTrace();
                }
            }
        }
        // parse pages
        if (data.size() > 0) {
            log.info("Parsing " + Integer.toString(data.size()) + " Records");
            collection = new HashSet<Callable<ArrayList<String>>>(data.size());
            for (String json : data) {
                Map<String, Object> jmap = Json.read(json).asMap();
                // for each table in the tags Map which is a key
                for (String k : tags.keySet()) {
                    collection.add(new Parser(tags.get(k), jmap.get(htmlColumn).toString(),
                            replacePattern, replacement, jmap.get(hashColumn).toString(), hashColumn, k));
                    if (collection.size() + 1 == data.size()
                            || (collection.size() % commitsize == 0 && collection.size() >= commitsize)) {
                        log.info("Waiting for Tasks to Complete");
                        futures = fjp.invokeAll(collection);
                        int w = 0;
                        while (fjp.isQuiescent() == false && fjp.getActiveThreadCount() > 0) {
                            w++;
                        }
                        for (Future<ArrayList<String>> future : futures) {
                            try {
                                outdata.addAll(future.get());
                            } catch (NullPointerException e) {
                                log.info("Some Data Returned Null");
                            } catch (InterruptedException e) {
                                e.printStackTrace();
                            } catch (ExecutionException e) {
                                e.printStackTrace();
                            }
                        }
                        log.info("Parsed " + outdata.size() + " records!");
                        // post data
                        if (outdata.size() > 0) {
                            checkTables(outdata);
                            this.sendToDb(outdata, true);
                            outdata = new ArrayList<String>(commitsize);
                        }
                    }
                }
            }
            data = new ArrayList<String>(commitsize);
        } else {
            log.info("No Records Found. Terminating!");
            run = false;
        }
    }
    if (outdata.size() > 0) {
        log.info("Posting Last Records");
        // post remaining pages for the iteration
        checkTables(outdata);
        this.sendToDb(outdata, true);
        data.clear();
        outdata.clear();
    }
    // shutdown
    log.info("Complete! Shutting Down FJP.");
    fjp.shutdownNow();
    log.info("Finished Parse @ " + Calendar.getInstance().getTime().toString());
}
From source file: com.yoctopuce.YoctoAPI.YSensor.java

/**
 * Retrieves error correction data points previously entered using the method
 * calibrateFromPoints.
 *
 * @param rawValues : array of floating point numbers, that will be filled by the
 *         function with the raw sensor values for the correction points.
 * @param refValues : array of floating point numbers, that will be filled by the
 *         function with the desired values for the correction points.
 *
 * @return YAPI.SUCCESS if the call succeeds.
 *
 * @throws YAPI_Exception on error
 */
public int loadCalibrationPoints(ArrayList<Double> rawValues, ArrayList<Double> refValues) throws YAPI_Exception {
    // Empty the caller-supplied output lists before filling them.
    rawValues.clear();
    refValues.clear();
    // Load function parameters if not yet loaded
    if (_scale == 0) {
        if (load(YAPI.DefaultCacheValidity) != YAPI.SUCCESS) {
            return YAPI.DEVICE_NOT_FOUND;
        }
    }
    if (_caltyp < 0) {
        _throw(YAPI.NOT_SUPPORTED,
                "Calibration parameters format mismatch. Please upgrade your library or firmware.");
        return YAPI.NOT_SUPPORTED;
    }
    rawValues.clear();
    refValues.clear();
    for (double ii : _calraw) {
        rawValues.add(ii);
    }
    for (double ii : _calref) {
        refValues.add(ii);
    }
    return YAPI.SUCCESS;
}
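The Yoctopuce example above shows a common idiom: a method that fills caller-supplied lists calls clear() on them first, so stale contents from a previous call cannot leak into the new result. A minimal sketch of that idiom (the method name and values below are made up for illustration):

import java.util.ArrayList;
import java.util.List;

public class OutParamDemo {
    // Fills the two caller-supplied lists; any previous contents are discarded.
    static void loadPoints(List<Double> rawValues, List<Double> refValues) {
        rawValues.clear();
        refValues.clear();
        rawValues.add(1.5);
        refValues.add(1.48);
    }

    public static void main(String[] args) {
        List<Double> raw = new ArrayList<>();
        List<Double> ref = new ArrayList<>();
        raw.add(99.0); // stale data from an earlier call
        loadPoints(raw, ref);
        System.out.println(raw); // [1.5] -- the stale 99.0 is gone
        System.out.println(ref); // [1.48]
    }
}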
From source file: edu.ku.brc.specify.toycode.mexconabio.AnalysisWithGBIFToGBIF.java

@Override
public void process(final int type, final int options) {
    calcMaxScore();

    String gbifSQL = "SELECT DISTINCT id, catalogue_number, genus, species, subspecies, latitude, longitude, country, state_province, collector_name, locality, year, month, day, collector_num ";
    String fromClause1a = "FROM raw WHERE collector_num LIKE ? AND year = ? AND genus = ?";
    String fromClause1b = "FROM raw WHERE collector_num IS NULL AND year = ? AND genus = ?";
    //String fromClause2 = "FROM raw WHERE collector_num IS NULL AND year = ? AND month = ? AND genus = ? AND id <> ?";

    String postSQL = "FROM raw WHERE collector_num IS NOT NULL GROUP BY collector_num, year, genus";
    String srcSQL = "SELECT id, catalogue_number, genus, species, subspecies, latitude, longitude, country, state_province, collector_name, locality, year, month, day, collector_num "
            + postSQL + " ORDER BY collector_num";
    String grphashSQL = "SELECT name FROM group_hash";
    String gbifgbifInsert = "INSERT INTO gbifgbif (reltype, score, GBIFID, SNIBID) VALUES (?,?,?,?)";

    Statement stmt = null;
    PreparedStatement gStmt1a = null;
    PreparedStatement gStmt1b = null;
    //PreparedStatement gStmt2 = null;
    PreparedStatement gsStmt = null;

    Object[] refRow = new Object[NUM_FIELDS];
    Object[] cmpRow = new Object[NUM_FIELDS];

    long totalRecs = BasicSQLUtils.getCount(dbSrcConn, "SELECT COUNT(*) FROM group_hash");
    long procRecs = 0;
    long startTime = System.currentTimeMillis();
    int secsThreshold = 0;
    String blank = "X?";

    PrintWriter pw = null;
    try {
        pw = new PrintWriter("scoring_gbifgbif.log");
        gStmt1a = dbGBIFConn.prepareStatement(gbifSQL + fromClause1a);
        gStmt1b = dbGBIFConn.prepareStatement(gbifSQL + fromClause1b);
        //gStmt2 = dbGBIFConn.prepareStatement(gbifSQL + fromClause2);
        gsStmt = dbDstConn.prepareStatement(gbifgbifInsert);

        stmt = dbSrcConn.createStatement(ResultSet.FETCH_FORWARD, ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);

        System.out.println("Starting Query... " + totalRecs);
        pw.println("Starting Query... " + totalRecs);
        System.out.flush();
        pw.flush();

        HashSet<Integer> idHash = new HashSet<Integer>();
        int writeCnt = 0;
        ResultSet rs = stmt.executeQuery(grphashSQL);

        System.out.println(String.format("Starting Processing... Total Records %d  Max Score: %d  Threshold: %d",
                totalRecs, maxScore, thresholdScore));
        pw.println(String.format("Starting Processing... Total Records %d  Max Score: %d  Threshold: %d",
                totalRecs, maxScore, thresholdScore));
        System.out.flush();
        pw.flush();

        Vector<Object[]> group = new Vector<Object[]>();
        ArrayList<Integer> ids = new ArrayList<Integer>();

        while (rs.next()) {
            String[] tokens = StringUtils.split(rs.getString(1), '_');
            String colNum = tokens[0].trim();
            String year = tokens[1].trim();
            String genus = tokens[2].trim();

            if (StringUtils.isEmpty(colNum) || colNum.equals(blank)) colNum = null;
            if (StringUtils.isEmpty(year) || year.equals(blank)) year = null;
            if (StringUtils.isEmpty(genus) || genus.equals(blank)) genus = null;

            PreparedStatement gStmt1;
            if (colNum != null) {
                gStmt1 = gStmt1a;
                gStmt1.setString(1, "%" + colNum + "%");
                gStmt1.setString(2, year);
                gStmt1.setString(3, genus);
            } else {
                // fromClause1b has only two placeholders (year, genus)
                gStmt1 = gStmt1b;
                gStmt1.setString(1, year);
                gStmt1.setString(2, genus);
            }

            ResultSet gRS = gStmt1.executeQuery();

            // Reset the id list for this group before refilling it.
            ids.clear();
            int maxNonNullTot = -1;
            int maxNonNullInx = -1;
            int inx = 0;
            while (gRS.next()) {
                Object[] row = getRow();
                int cnt = fillRowWithScore(row, gRS);
                if (cnt > maxNonNullTot) {
                    maxNonNullInx = inx;
                    maxNonNullTot = cnt;
                }
                group.add(row);
                ids.add(gRS.getInt(1));
                inx++;
            }
            gRS.close();

            if (inx < 2) {
                for (Object[] r : group) {
                    recycleRow(r);
                }
                group.clear();
                continue;
            }

            // Use the most complete row of the group as the reference row.
            System.arraycopy(group.get(maxNonNullInx), 0, refRow, 0, refRow.length);
            Integer srcId = ids.get(maxNonNullInx);

            for (int i = 0; i < group.size(); i++) {
                if (i != maxNonNullInx) {
                    int score = score(refRow, group.get(i));
                    if (score > thresholdScore) {
                        writeCnt++;
                        int gbifID = ids.get(i);
                        gsStmt.setInt(1, 1);     // reltype
                        gsStmt.setInt(2, score); // score
                        gsStmt.setInt(3, gbifID);
                        gsStmt.setInt(4, srcId);
                        gsStmt.executeUpdate();
                        idHash.add(gbifID);
                    }
                }
            }
            idHash.clear();

            for (Object[] r : group) {
                recycleRow(r);
            }
            group.clear();

            if (gStmt1 == gStmt1b) {
                continue;
            }

            // Re-run the comparison against the records with no collector number.
            gStmt1 = gStmt1b;
            gStmt1.setString(1, year);
            gStmt1.setString(2, genus);
            gRS = gStmt1.executeQuery();
            while (gRS.next()) {
                fillRowWithScore(cmpRow, gRS);
                int gbifID = gRS.getInt(1);
                if (gbifID == srcId) continue;

                int score = score(refRow, cmpRow);
                if (score > thresholdScore) {
                    writeCnt++;
                    gsStmt.setInt(1, 1);     // reltype
                    gsStmt.setInt(2, score); // score
                    gsStmt.setInt(3, gbifID);
                    gsStmt.setInt(4, srcId);
                    gsStmt.executeUpdate();
                }
            }
            gRS.close();

            procRecs++;
            if (procRecs % 500 == 0) {
                long endTime = System.currentTimeMillis();
                long elapsedTime = endTime - startTime;
                double timePerRecord = (elapsedTime / procRecs);
                double hrsLeft = ((totalRecs - procRecs) * timePerRecord) / HRS;
                int seconds = (int) (elapsedTime / 60000.0);
                if (secsThreshold != seconds) {
                    secsThreshold = seconds;
                    String msg = String.format("Elapsed %8.2f hr.mn  Percent: %6.3f  Hours Left: %8.2f ",
                            ((double) (elapsedTime)) / HRS,
                            100.0 * ((double) procRecs / (double) totalRecs), hrsLeft);
                    System.out.println(msg);
                    pw.println(msg);
                    pw.flush();
                }
            }
        }
        rs.close();

        System.out.println("Done.");
        pw.println("Done.");
    } catch (Exception ex) {
        ex.printStackTrace();
    } finally {
        try {
            if (stmt != null) stmt.close();
            if (gStmt1a != null) gStmt1a.close();
            if (gStmt1b != null) gStmt1b.close();
            /*if (gStmt2 != null) gStmt2.close();*/
        } catch (Exception ex) {
        }
    }
    System.out.println("Done.");
    pw.println("Done.");
    pw.flush();
    pw.close();
}
From source file: io.github.minecraftgui.controllers.NetworkController.java

public void sortPlugins() {
    ArrayList<PluginInfo> pluginsToRemove = new ArrayList<>();
    ArrayList<PluginInfo> pluginsAdded = new ArrayList<>();
    int i, j;

    while (pluginsInfo.size() != 0) {
        for (i = 0; i < pluginsInfo.size(); i++) {
            PluginInfo pluginInfo = pluginsInfo.get(i);

            if (pluginInfo.dependencies.size() == 0) {
                pluginsAdded.add(pluginInfo);
                pluginsToRemove.add(pluginInfo);
            } else {
                int nbDependenciesToFind = pluginInfo.dependencies.size();

                for (j = 0; j < pluginsAdded.size(); j++) {
                    PluginInfo pluginInfoAdded = pluginsAdded.get(j);

                    if (pluginInfo.dependencies.contains(pluginInfoAdded.name))
                        nbDependenciesToFind--;
                }

                if (nbDependenciesToFind == 0) {
                    pluginsAdded.add(pluginInfo);
                    pluginsToRemove.add(pluginInfo);
                }
            }
        }

        // If nothing could be placed this pass, the remaining plugins have
        // unresolvable dependencies; drop them to avoid looping forever.
        if (pluginsToRemove.size() == 0)
            pluginsInfo.clear();
        else
            pluginsInfo.removeAll(pluginsToRemove);

        pluginsToRemove.clear();
    }

    pluginsInfo.clear();
    for (PluginInfo pluginInfo : pluginsAdded)
        pluginsInfo.add(pluginInfo);
}
From source file: can.yrt.onebusaway.ArrivalsListFragment.java

private void setRoutesFilter(boolean[] checks) {
    final int len = checks.length;
    final ArrayList<String> newFilter = new ArrayList<String>(len);

    ObaArrivalInfoResponse response = getArrivalsLoader().getLastGoodResponse();
    final List<ObaRoute> routes = response.getRoutes(mStop.getRouteIds());
    assert (routes.size() == len);

    for (int i = 0; i < len; ++i) {
        final ObaRoute route = routes.get(i);
        if (checks[i]) {
            newFilter.add(route.getId());
        }
    }
    // If the size of the filter is the number of routes
    // (i.e., the user selected every checkbox), then
    // don't filter any routes at all.
    if (newFilter.size() == len) {
        newFilter.clear();
    }

    setRoutesFilter(newFilter);
    mHeader.refresh();
}
From source file: com.yoctopuce.YoctoAPI.YMessageBox.java

public ArrayList<Integer> gsm2unicode(byte[] gsm) {
    int i;
    int gsmlen;
    int reslen;
    ArrayList<Integer> res = new ArrayList<Integer>();
    int uni;
    if (!(_gsm2unicodeReady)) {
        initGsm2Unicode();
    }
    gsmlen = (gsm).length;
    reslen = gsmlen;
    i = 0;
    // Each ESC (27) byte introduces a two-byte extension sequence,
    // so it does not produce a character of its own.
    while (i < gsmlen) {
        if (gsm[i] == 27) {
            reslen = reslen - 1;
        }
        i = i + 1;
    }
    // (Re)start from an empty result list.
    res.clear();
    i = 0;
    while (i < gsmlen) {
        uni = _gsm2unicode.get(gsm[i]).intValue();
        if ((uni == 27) && (i + 1 < gsmlen)) {
            i = i + 1;
            uni = gsm[i];
            // Map GSM 7-bit extension-table codes to their Unicode code points;
            // unknown extension codes map to 0 and are dropped below.
            switch (uni) {
            case 20:  uni = 94;  break; // ^
            case 40:  uni = 123; break; // {
            case 41:  uni = 125; break; // }
            case 47:  uni = 92;  break; // \
            case 60:  uni = 91;  break; // [
            case 61:  uni = 126; break; // ~
            case 62:  uni = 93;  break; // ]
            case 64:  uni = 124; break; // |
            case 101: uni = 164; break; // currency sign
            default:  uni = 0;   break;
            }
        }
        if (uni > 0) {
            res.add(uni);
        }
        i = i + 1;
    }
    return res;
}
From source file: org.apache.hadoop.mapred.JobIDDeamon.java

private void cleanTmpPath(ArrayList<JobID> jobIDs) {
    LOG.info("======JobIDDeamon.cleanTmpPath()======");
    ArrayList<Path> jobIDsTmpPath = new ArrayList<Path>();
    FileSystem fs = null;
    Path sysDir = new Path(this.jobSubmitClient.getSystemDir());
    //LOG.info("=====sysDir ==" + sysDir + "======");
    //for (JobID jobID : jobIDs) {
    //    jobIDsTmpPath.add(new Path(new Path(this.jobSubmitClient.getSystemDir()).getParent(), jobID.toString()));
    //}
    Path sysParentDir = sysDir.getParent();
    try {
        fs = sysParentDir.getFileSystem(this.conf);
        FileStatus[] files = fs.listStatus(sysParentDir);
        for (FileStatus file : files) {
            // Keep paths belonging to live jobs and the system dir itself;
            // everything else is stale and gets deleted.
            if (checkFile(jobIDs, file) == true || file.getPath().equals(sysDir) == true) {
                continue;
            } else {
                LOG.info("=====delete " + file.getPath() + "======");
                fs.delete(file.getPath(), true);
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    jobIDs.clear();
}
From source file: com.datamelt.nifi.processors.ExecuteRuleEngine.java

/**
 * Generates the flow files for each row of data in the flow file in the form of a list.
 *
 * @param context process context
 * @param session process session
 * @param rows list of rows from the flow file content
 * @param header the header row
 * @param headerPresent indicator from the configuration if a header is present
 * @return list of flow files
 */
private List<FlowFile> generateFlowFileSplits(ProcessContext context, ProcessSession session,
        ArrayList<RuleEngineRow> rows, HeaderRow header, boolean headerPresent) {
    List<FlowFile> splitFlowFiles = new ArrayList<>();
    for (int i = 0; i < rows.size(); i++) {
        FlowFile splitFlowFile = session.create();
        splitFlowFile = updateFlowFileContent(header, headerPresent, context, session, splitFlowFile, rows.get(i));
        // put the properties in the flow file
        splitFlowFile = session.putAllAttributes(splitFlowFile, rows.get(i).getMap());
        splitFlowFiles.add(splitFlowFile);
    }
    // Release the processed rows so they can be garbage collected.
    rows.clear();
    getLogger().debug("created list of " + splitFlowFiles.size() + " flowfiles");
    return splitFlowFiles;
}
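Several of the examples above (KVParser, ExecuteRuleEngine, the Hive split builder) rely on the same accumulate-then-clear pattern: collect items into an ArrayList, hand the batch off, then call clear() so the same allocation is reused and the processed items become eligible for garbage collection. A minimal sketch of the pattern (the flush target and batch size are stand-ins):

import java.util.ArrayList;
import java.util.List;

public class BatchDemo {
    static final int BATCH_SIZE = 3;

    static void flush(List<String> batch) {
        System.out.println("flushing " + batch.size() + " items: " + batch);
    }

    public static void main(String[] args) {
        List<String> batch = new ArrayList<>(BATCH_SIZE);
        for (int i = 0; i < 7; i++) {
            batch.add("item-" + i);
            if (batch.size() == BATCH_SIZE) {
                flush(batch);
                batch.clear(); // reuse the same list for the next batch
            }
        }
        if (!batch.isEmpty()) {
            flush(batch); // post the remaining items
        }
    }
}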