List of usage examples for java.lang.Double.MIN_VALUE
public static final double MIN_VALUE: the smallest positive nonzero value of type double, 2^-1074 (approximately 4.9E-324).
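Note: because Double.MIN_VALUE is the smallest positive double rather than the most negative one, it behaves very differently from Integer.MIN_VALUE. The following minimal sketch is an editorial illustration (it is not taken from any of the projects below) and simply prints the relevant constants; several examples on this page seed a running maximum with Double.MIN_VALUE, which only gives the expected result when at least one non-negative value is encountered.

public class DoubleMinValueDemo {
    public static void main(String[] args) {
        // Smallest positive nonzero double, not the most negative double.
        System.out.println(Double.MIN_VALUE);         // 4.9E-324
        // The most negative finite double is -Double.MAX_VALUE.
        System.out.println(-Double.MAX_VALUE);        // -1.7976931348623157E308
        // For a "smaller than anything" seed, NEGATIVE_INFINITY is the safest choice.
        System.out.println(Double.NEGATIVE_INFINITY); // -Infinity
    }
}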
From source file: tufts.vue.ds.Field.java

private void flushStats(boolean init) {
    if (!init)
        Log.debug("flushing " + this);

    // reset to initial defaults
    mValues.clear();
    mValuesSeen = 0;
    mValueTrackDisabled = false;
    mAllValuesUnique = true;
    mAllValuesAreIntegers = true;
    mMaxValueLen = 0;
    mType = TYPE_INTEGER;
    mTypeDetermined = false;
    mDataComments.clear();
    mMinValue = Double.MAX_VALUE;
    mMaxValue = Double.MIN_VALUE;
    mValuesTotal = 0;
    mMeanValue = 0;
    mMedianValue = 0;
    mStandardDeviation = 0;
    mQuantiles = null;

    // we keep the nodeStyle, which is the whole reason we use a flush instead of
    // just creating new Schema+Field objects when reloading. Tho at this point,
    // may be easier to re-create all & just carry over the styles.
}
From source file: uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport.java

/**
 *
 * @param variablesNames_StringArray
 * @param aggregatedSARData
 * @param comparisonCASData
 * @return
 * @throws IOException
 */
public static Object[] getRegressionParametersAndCreateXYLineCharts(String[] variablesNames_StringArray,
        double[][] aggregatedSARData, double[][] comparisonCASData) throws IOException {
    Object[] result = new Object[3];
    Object[] t_RegressionParameters = new Object[comparisonCASData.length];
    JFreeChart[] t_regressionXYLineCharts = new JFreeChart[comparisonCASData.length];
    JFreeChart[] t_yequalsxXYLineCharts = new JFreeChart[comparisonCASData.length];
    result[0] = t_RegressionParameters;
    result[1] = t_regressionXYLineCharts;
    result[2] = t_yequalsxXYLineCharts;
    String title = null;
    // String xAxisLabel;
    // String yAxisLabel;
    // xAxisLabel = new String("CAS Estimation (Observed)");
    // yAxisLabel = new String("SAR Prediction (Expected)");
    boolean legend = false;
    boolean tooltips = false;
    boolean urls = false;
    double[][] data;
    double[] a_RegressionParameters;
    for (int i = 0; i < comparisonCASData.length; i++) {
        title = variablesNames_StringArray[i + 1];
        double[][] bounds = new double[2][2];
        double[][] regressionLineChartData = new double[2][2];
        double[][] yequalsxLineChartData = new double[2][2];
        bounds[0][0] = Double.MAX_VALUE; // xmin SAR;
        bounds[0][1] = Double.MIN_VALUE; // xmax SAR;
        bounds[1][0] = Double.MAX_VALUE; // ymin CAS;
        bounds[1][1] = Double.MIN_VALUE; // ymax CAS;
        data = new double[2][comparisonCASData[i].length];
        for (int j = 0; j < comparisonCASData[i].length; j++) {
            // data[0][j] = comparisonCASData[i][j];
            // data[1][j] = aggregatedSARData[i][j];
            data[0][j] = aggregatedSARData[i][j];
            data[1][j] = comparisonCASData[i][j];
            bounds[0][0] = Math.min(bounds[0][0], comparisonCASData[i][j]);
            bounds[0][1] = Math.max(bounds[0][1], comparisonCASData[i][j]);
            bounds[1][0] = Math.min(bounds[1][0], aggregatedSARData[i][j]);
            bounds[1][1] = Math.max(bounds[1][1], aggregatedSARData[i][j]);
        }
        System.out.println("xmin SAR " + bounds[0][0]);
        System.out.println("xmax SAR " + bounds[0][1]);
        System.out.println("ymin CAS " + bounds[1][0]);
        System.out.println("ymax CAS " + bounds[1][1]);
        // intercept, slope, RSquare
        double[] aSimpleRegressionParameters = printSimpleRegression(data);
        // // intercept, slope, RSquare?
        // double[] aSimpleOLSParameters = printOLSRegression(data);
        double[] usedRegressionParameters = aSimpleRegressionParameters;
        // // Force origin to be (0,0)
        // yequalsxLineChartData[0][0] = 0.0d;
        // yequalsxLineChartData[1][0] = 0.0d;
        // Get intercept on x or y axis
        if (bounds[0][0] > bounds[1][0]) {
            yequalsxLineChartData[0][0] = bounds[0][0];
            yequalsxLineChartData[1][0] = bounds[0][0];
            if (usedRegressionParameters[0] < bounds[0][0]) {
                regressionLineChartData[0][0] = (bounds[0][0] * usedRegressionParameters[1])
                        + usedRegressionParameters[0];
                regressionLineChartData[1][0] = 0.0d;
            } else {
                regressionLineChartData[0][0] = 0.0d;
                regressionLineChartData[1][0] = usedRegressionParameters[0];
            }
        } else {
            yequalsxLineChartData[0][0] = bounds[1][0];
            yequalsxLineChartData[1][0] = bounds[1][0];
            if (usedRegressionParameters[0] < bounds[1][0]) {
                // regressionLineChartData[0][0] =
                // (bounds[1][0] * usedRegressionParameters[1])
                // + usedRegressionParameters[0];
                regressionLineChartData[0][0] = (bounds[1][0] - usedRegressionParameters[0])
                        / usedRegressionParameters[1];
                regressionLineChartData[1][0] = 0.0d;
            } else {
                regressionLineChartData[0][0] = 0.0d;
                regressionLineChartData[1][0] = usedRegressionParameters[0];
            }
        }
        // Get intercept on edge of graph
        if (bounds[0][1] > bounds[1][1]) {
            yequalsxLineChartData[0][1] = bounds[1][1];
            yequalsxLineChartData[1][1] = bounds[1][1];
            // regressionLineChartData[0][1] =
            // (yequalsxLineChartData[1][1] - usedRegressionParameters[0])
            // / usedRegressionParameters[1];
            regressionLineChartData[1][1] = (yequalsxLineChartData[1][1] * usedRegressionParameters[1])
                    + usedRegressionParameters[1];
            // regressionLineChartData[1][1] =
            // (yequalsxLineChartData[0][1] - usedRegressionParameters[0])
            // / usedRegressionParameters[1];
            regressionLineChartData[0][1] = yequalsxLineChartData[1][1];
            if (regressionLineChartData[1][1] > bounds[1][1]
                    && regressionLineChartData[1][1] > bounds[0][1]) {
                regressionLineChartData[1][1] = yequalsxLineChartData[0][1];
                regressionLineChartData[0][1] = (regressionLineChartData[1][1] - usedRegressionParameters[0])
                        / usedRegressionParameters[1];
            }
        } else {
            yequalsxLineChartData[0][1] = bounds[0][1];
            yequalsxLineChartData[1][1] = bounds[0][1];
            regressionLineChartData[0][1] = (yequalsxLineChartData[0][1] - usedRegressionParameters[0])
                    / usedRegressionParameters[1];
            regressionLineChartData[1][1] = yequalsxLineChartData[0][1];
            if (regressionLineChartData[0][1] > bounds[0][1]) {
                regressionLineChartData[1][1] = yequalsxLineChartData[0][1];
                // regressionLineChartData[0][1] =
                // (regressionLineChartData[1][1] * usedRegressionParameters[1])
                // + usedRegressionParameters[1];
                regressionLineChartData[0][1] = (regressionLineChartData[1][1] - usedRegressionParameters[0])
                        / usedRegressionParameters[1];
            }
        }
        System.out.println("Regression line");
        t_RegressionParameters[i] = usedRegressionParameters;
        System.out.println(
                "(minx,miny) (" + regressionLineChartData[0][0] + "," + regressionLineChartData[1][0] + ")");
        System.out.println(
                "(maxx,maxy) (" + regressionLineChartData[0][1] + "," + regressionLineChartData[1][1] + ")");
        DefaultXYDataset regressionLineDefaultXYDataset = new DefaultXYDataset();
        regressionLineDefaultXYDataset.addSeries("Regression Line", regressionLineChartData);
        t_regressionXYLineCharts[i] = ChartFactory.createXYLineChart(title,
                "", //xAxisLabel,
                "", //yAxisLabel,
                regressionLineDefaultXYDataset,
                //PlotOrientation.HORIZONTAL,
                PlotOrientation.VERTICAL, legend, tooltips, urls);
        System.out.println("Y = X line");
        System.out.println(
                "(minx,miny) (" + yequalsxLineChartData[0][0] + "," + yequalsxLineChartData[1][0] + ")");
        System.out.println(
                "(maxx,maxy) (" + yequalsxLineChartData[0][1] + "," + yequalsxLineChartData[1][1] + ")");
        DefaultXYDataset yequalsxLineDefaultXYDataset = new DefaultXYDataset();
        yequalsxLineDefaultXYDataset.addSeries("y = x", yequalsxLineChartData);
        t_yequalsxXYLineCharts[i] = ChartFactory.createXYLineChart(title,
                "", //xAxisLabel,
                "", //yAxisLabel,
                yequalsxLineDefaultXYDataset,
                PlotOrientation.VERTICAL,
                //PlotOrientation.HORIZONTAL,
                legend, tooltips, urls);
    }
    return result;
}
From source file: ubic.basecode.math.DescriptiveWithMissing.java

public static double max(DoubleArrayList input) {
    int size = input.size();
    if (size == 0)
        throw new IllegalArgumentException();
    double[] elements = input.elements();
    double max = Double.MIN_VALUE;
    for (int i = 0; i < size; i++) {
        if (Double.isNaN(elements[i]))
            continue;
        if (elements[i] > max)
            max = elements[i];
    }
    return max;
}
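Note: several examples on this page, including the flushStats and max methods above, seed a running maximum with Double.MIN_VALUE. Because Double.MIN_VALUE is positive, that seed silently produces a wrong answer when every input is negative. A minimal editorial sketch of the pitfall and of a safer seed follows (method and class names are illustrative, not taken from the projects above):

public class RunningMaxSeed {
    // Seeding with Double.MIN_VALUE: wrong when all inputs are negative.
    static double maxWithMinValueSeed(double[] values) {
        double max = Double.MIN_VALUE;
        for (double v : values) {
            if (v > max)
                max = v;
        }
        return max; // returns 4.9E-324 for {-3.0, -1.5}
    }

    // Seeding with NEGATIVE_INFINITY: correct for any non-empty input.
    static double maxWithInfinitySeed(double[] values) {
        double max = Double.NEGATIVE_INFINITY;
        for (double v : values) {
            if (v > max)
                max = v;
        }
        return max; // returns -1.5 for {-3.0, -1.5}
    }

    public static void main(String[] args) {
        double[] allNegative = { -3.0, -1.5 };
        System.out.println(maxWithMinValueSeed(allNegative));  // 4.9E-324
        System.out.println(maxWithInfinitySeed(allNegative));  // -1.5
    }
}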
From source file: org.lpe.common.util.LpeNumericUtils.java

/**
 * Remove outliers in values by calculating the distance to the neighbor
 * points. If the distance of a point to its neighbors is quite high, the
 * point is considered as an outlier.
 *
 * @param list
 *            list of pairs
 * @param noiseThreshold
 *            noise threshold
 * @param percentile
 *            if noise threshold is not used, percentile of the noise is
 *            used as threshold
 * @param windowSize
 *            window size to include neighbors for consideration
 *
 * @param <T>
 *            value type
 * @return filtered list of pairs
 */
public static <T extends Number> NumericPairList<T, Double> removeNoiseInValues(NumericPairList<T, Double> list,
        double noiseThreshold, double percentile, int windowSize) {
    NumericPairList<T, Double> result = new NumericPairList<>();
    if (noiseThreshold > 0) {
        double[] noiseMetrics = new double[list.size()];
        double maxNoise = Double.MIN_VALUE;
        for (int i = 0; i < list.size(); i++) {
            double sum = 0;
            double count = 0;
            for (int j = i - windowSize / 2; j <= i + windowSize / 2; j++) {
                if (j < 0 || j >= list.size() || i == j) {
                    continue;
                }
                sum += Math.abs(list.get(i).getValue() - list.get(j).getValue());
                count += 1.0;
            }
            noiseMetrics[i] = sum / count;
            if (noiseMetrics[i] > maxNoise) {
                maxNoise = noiseMetrics[i];
            }
        }
        for (int i = 0; i < noiseMetrics.length; i++) {
            double relativeNoise = noiseMetrics[i] / maxNoise;
            if (relativeNoise < noiseThreshold) {
                result.add(new NumericPair<T, Double>(list.get(i).getKey(), list.get(i).getValue()));
            }
        }
    } else {
        double[] noiseMetrics = new double[list.size()];
        double maxNoise = Double.MIN_VALUE;
        for (int i = 0; i < list.size(); i++) {
            double sum = 0;
            double count = 0;
            for (int j = i - windowSize / 2; j <= i + windowSize / 2; j++) {
                if (j < 0 || j >= list.size() || i == j) {
                    continue;
                }
                sum += Math.abs(list.get(i).getValue() - list.get(j).getValue());
                count += 1.0;
            }
            noiseMetrics[i] = sum / count;
            if (noiseMetrics[i] > maxNoise) {
                maxNoise = noiseMetrics[i];
            }
        }
        List<Double> noisemetricsList = new ArrayList<>();
        for (int i = 0; i < noiseMetrics.length; i++) {
            noiseMetrics[i] = noiseMetrics[i] / maxNoise;
            noisemetricsList.add(noiseMetrics[i]);
        }
        Collections.sort(noisemetricsList);
        int percentileIx = (int) (((double) noisemetricsList.size()) * percentile);
        noiseThreshold = noisemetricsList.get(percentileIx);
        for (int i = 0; i < noiseMetrics.length; i++) {
            if (noiseMetrics[i] < noiseThreshold) {
                result.add(new NumericPair<T, Double>(list.get(i).getKey(), list.get(i).getValue()));
            }
        }
    }
    return result;
}
From source file: org.openpnp.machine.reference.camera.TableScannerCamera.java

private synchronized void initialize() throws Exception {
    stop();

    sourceUrl = new URL(sourceUri);

    cacheDirectory = new File(Configuration.get().getResourceDirectory(getClass()),
            DigestUtils.shaHex(sourceUri));

    if (!cacheDirectory.exists()) {
        cacheDirectory.mkdirs();
    }

    File[] files = null;

    // Attempt to get the list of files from the source.
    try {
        files = loadSourceFiles();
    } catch (Exception e) {
        logger.warn("Unable to load file list from {}", sourceUri);
        logger.warn("Reason", e);
    }

    if (files == null) {
        files = loadCachedFiles();
    }

    if (files.length == 0) {
        throw new Exception("No source or cached files found.");
    }

    // Load the first image we found and use it's properties as a template
    // for the rest of the images.
    BufferedImage templateImage = new Tile(0, 0, files[0]).getImage();
    width = templateImage.getWidth();
    height = templateImage.getHeight();

    tileList = new ArrayList<Tile>();
    lastX = Double.MIN_VALUE;
    lastY = Double.MIN_VALUE;
    lastCenterTile = null;

    // We build a set of unique X and Y positions that we see so we can
    // later build a two dimensional array of the riles
    TreeSet<Double> uniqueX = new TreeSet<Double>();
    TreeSet<Double> uniqueY = new TreeSet<Double>();

    // Create a map of the tiles so that we can quickly find them when we
    // build the array.
    Map<Tile, Tile> tileMap = new HashMap<Tile, Tile>();

    // Parse the filenames of the all the files and add their coordinates
    // to the sets and map.
    for (File file : files) {
        String filename = file.getName();
        filename = filename.substring(0, filename.indexOf(".png"));
        String[] xy = filename.split(",");
        double x = Double.parseDouble(xy[0]);
        double y = Double.parseDouble(xy[1]);
        Tile tile = new Tile(x, y, file);
        uniqueX.add(x);
        uniqueY.add(y);
        tileMap.put(tile, tile);
        tileList.add(tile);
    }

    // Create a two dimensional array to store all the of the tiles
    tiles = new Tile[uniqueX.size()][uniqueY.size()];

    // Iterate through all the unique X and Y positions that were found
    // and add each file to the two dimensional array in the position
    // where it belongs
    int x = 0, y = 0;
    for (Double xPos : uniqueX) {
        y = 0;
        for (Double yPos : uniqueY) {
            Tile tile = tileMap.get(new Tile(xPos, yPos, null));
            tiles[x][y] = tile;
            tile.setTileX(x);
            tile.setTileY(y);
            y++;
        }
        x++;
    }

    /*
     * Create a buffer that we will render the center tile and it's
     * surrounding tiles to.
     */
    buffer = new BufferedImage(templateImage.getWidth() * tilesWide, templateImage.getHeight() * tilesHigh,
            BufferedImage.TYPE_INT_ARGB);

    if (listeners.size() > 0) {
        start();
    }
}
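Note: in this example Double.MIN_VALUE is used as a "not yet set" sentinel for lastX and lastY rather than as a numeric bound, relying on the fact that no real coordinate will ever equal 4.9E-324. A minimal editorial sketch of the same idea (the field and method names are illustrative, not from the project above):

class PositionTracker {
    // Sentinel meaning "no position recorded yet"; any real coordinate differs from it.
    private double lastX = Double.MIN_VALUE;
    private double lastY = Double.MIN_VALUE;

    // Returns true the first time and whenever the position changes.
    boolean hasMoved(double x, double y) {
        boolean moved = (x != lastX) || (y != lastY);
        lastX = x;
        lastY = y;
        return moved;
    }
}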
From source file: edu.rice.cs.bioinfo.programs.phylonet.commands.SearchBranchLengthsMaxGTProb.java

@Override
protected String produceResult() {
    StringBuffer result = new StringBuffer();

    final List<Tree> geneTrees = new ArrayList<Tree>();
    final List<Integer> counter = new ArrayList<Integer>();
    for (NetworkNonEmpty geneTree : _geneTrees) {
        String phylonetGeneTree = NetworkTransformer.toENewickTree(geneTree);
        NewickReader nr = new NewickReader(new StringReader(phylonetGeneTree));
        STITree<Double> newtr = new STITree<Double>(true);
        try {
            nr.readTree(newtr);
        } catch (Exception e) {
            errorDetected.execute(e.getMessage(), this._motivatingCommand.getLine(),
                    this._motivatingCommand.getColumn());
        }
        boolean found = false;
        int index = 0;
        for (Tree tr : geneTrees) {
            if (Trees.haveSameRootedTopology(tr, newtr)) {
                found = true;
                break;
            }
            index++;
        }
        if (found) {
            counter.set(index, counter.get(index) + 1);
        } else {
            geneTrees.add(newtr);
            counter.add(1);
        }
    }

    NetworkFactoryFromRNNetwork transformer = new NetworkFactoryFromRNNetwork();
    final Network<Double> speciesNetwork = transformer.makeNetwork(_speciesNetwork);

    /*
     * Make the branch length of every edge initially 1 if no initial user value is specified.
     * Make the hybrid prob of every hybrid edge initially .5 if no initial user value is specified.
     */
    for (NetNode<Double> parent : speciesNetwork.bfs()) {
        for (NetNode<Double> child : parent.getChildren()) {
            double initialBL = child.getParentDistance(parent);
            if (initialBL == NetNode.NO_DISTANCE || Double.isNaN(initialBL)) // no specification from user on branch length
                initialBL = 1.0;
            child.setParentDistance(parent, initialBL);

            if (child.getParentCount() == 2) {
                for (NetNode<Double> hybridParent : child.getParents()) {
                    if (child.getParentProbability(hybridParent) == 1.0) // no specification from user on hybrid prob
                    {
                        child.setParentProbability(parent, 0.5);
                    }
                }
            }
        }
    }

    /*
     * Try to assign branch lengths and hybrid probs to increase GTProb from the initial network.
     * Except branch lengths of leaf edges. They don't impact GTProb.
     */
    // def: a round is an attempt to tweak each branch length and each hybrid prob.
    boolean continueRounds = true; // keep trying to improve network
    final Container<Double> lnGtProbOfSpeciesNetwork = new Container<Double>(
            _computeGTProbStrategy.execute(speciesNetwork, geneTrees, counter)); // records the GTProb of the network at all times

    int assigmentRound = 0;
    for (; assigmentRound < _assigmentRounds && continueRounds; assigmentRound++) {
        /*
         * Prepare a random ordering of network edge examinations each of which attempts to change
         * a branch length or hybrid prob to improve the GTProb score.
         */
        double lnGtProbLastRound = lnGtProbOfSpeciesNetwork.getContents();
        List<Proc> assigmentActions = new ArrayList<Proc>(); // store adjustment commands here. Will execute them one by one later.

        // add branch length adjustments to the list
        for (final NetNode<Double> parent : speciesNetwork.bfs()) {
            for (final NetNode<Double> child : parent.getChildren()) {
                if (!parent.isRoot()) // jd tmp
                    continue;

                if (child.isLeaf()) // leaf edge, skip
                    continue;

                assigmentActions.add(new Proc() {
                    public void execute() {
                        UnivariateFunction functionToOptimize = new UnivariateFunction() {
                            public double value(double suggestedBranchLength) { // brent suggests a new branch length
                                double incumbentBranchLength = child.getParentDistance(parent);

                                // mutate and see if it yields an improved network
                                child.setParentDistance(parent, suggestedBranchLength);
                                double lnProb = _computeGTProbStrategy.execute(speciesNetwork, geneTrees, counter);

                                RnNewickPrinter<Double> rnNewickPrinter = new RnNewickPrinter<Double>();
                                StringWriter sw = new StringWriter();
                                rnNewickPrinter.print(speciesNetwork, sw);
                                // String inferredNetwork = sw.toString();
                                // System.out.println(inferredNetwork + "\t" + lnProb);

                                if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // did improve, keep change
                                {
                                    lnGtProbOfSpeciesNetwork.setContents(lnProb);
                                    // System.out.println("(improved)");
                                } else // didn't improve, roll back change
                                {
                                    child.setParentDistance(parent, incumbentBranchLength);
                                }
                                return lnProb;
                            }
                        };
                        BrentOptimizer optimizer = new BrentOptimizer(.000000000001, .0000000000000001); // very small numbers so we control when brent stops, not brent.

                        try {
                            optimizer.optimize(_maxAssigmentAttemptsPerBranchParam, functionToOptimize,
                                    GoalType.MAXIMIZE, Double.MIN_VALUE, _maxBranchLength);
                        } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded
                        {
                        }
                        // System.out.println("-----------------------------------------------------------------------");
                    }
                });
            }
        }

        // add hybrid probs to hybrid edges
        for (final NetNode<Double> child : speciesNetwork.bfs()) // find every hybrid node
        {
            if (child.isRoot()) // calling getParentNumber on root causes NPE. Bug workaround.
                continue;

            if (child.getParentCount() == 2) // hybrid node
            {
                Iterator<NetNode<Double>> hybridParents = child.getParents().iterator();
                final NetNode<Double> hybridParent1 = hybridParents.next();
                final NetNode<Double> hybridParent2 = hybridParents.next();

                assigmentActions.add(new Proc() {
                    public void execute() {
                        UnivariateFunction functionToOptimize = new UnivariateFunction() {
                            public double value(double suggestedProb) {
                                double incumbentHybridProbParent1 = child.getParentProbability(hybridParent1);

                                // try new pair of hybrid probs
                                child.setParentProbability(hybridParent1, suggestedProb);
                                child.setParentProbability(hybridParent2, 1.0 - suggestedProb);

                                double lnProb = _computeGTProbStrategy.execute(speciesNetwork, geneTrees, counter);

                                if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // change improved GTProb, keep it
                                {
                                    lnGtProbOfSpeciesNetwork.setContents(lnProb);
                                } else // change did not improve, roll back
                                {
                                    child.setParentProbability(hybridParent1, incumbentHybridProbParent1);
                                    child.setParentProbability(hybridParent2, 1.0 - incumbentHybridProbParent1);
                                }
                                return lnProb;
                            }
                        };
                        BrentOptimizer optimizer = new BrentOptimizer(.000000000001, .0000000000000001); // very small numbers so we control when brent stops, not brent.

                        try {
                            optimizer.optimize(_maxAssigmentAttemptsPerBranchParam, functionToOptimize,
                                    GoalType.MAXIMIZE, 0, 1.0);
                        } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded
                        {
                        }
                    }
                });
            }
        }

        // Collections.shuffle(assigmentActions); // randomize the order we will try to adjust network edge properties

        for (Proc assigment : assigmentActions) // for each change attempt, perform attempt
        {
            assigment.execute();
        }

        if (((double) lnGtProbOfSpeciesNetwork.getContents()) == lnGtProbLastRound) // if no improvement was made wrt to last around, stop trying to find a better assignment
        {
            continueRounds = false;
        } else if (lnGtProbOfSpeciesNetwork.getContents() > lnGtProbLastRound) // improvement was made, ensure it is large enough wrt to improvement threshold to continue searching
        {
            double improvementPercentage = Math.pow(Math.E,
                    (lnGtProbOfSpeciesNetwork.getContents() - lnGtProbLastRound)) - 1.0; // how much did we improve over last round
            if (improvementPercentage < _improvementThreshold) // improved, but not enough to keep searching
            {
                continueRounds = false;
            }
        } else {
            throw new IllegalStateException("Should never have decreased prob.");
        }
    }

    RnNewickPrinter<Double> rnNewickPrinter = new RnNewickPrinter<Double>();
    StringWriter sw = new StringWriter();
    rnNewickPrinter.print(speciesNetwork, sw);
    String inferredNetwork = sw.toString();

    this.richNewickGenerated(inferredNetwork);

    result.append(
            "\nTotal log probability: " + lnGtProbOfSpeciesNetwork.getContents() + ": " + inferredNetwork);

    return result.toString();
}
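Note: in the example above Double.MIN_VALUE is passed to the optimizer as the lower end of the branch-length search interval, that is, as "the smallest strictly positive value". This is one of the uses where the constant's actual meaning is what is wanted. A minimal editorial sketch of the same idea outside any optimizer (the clampPositive helper is illustrative, not part of the project above):

// Keep a quantity strictly positive, e.g. before taking a logarithm,
// by clamping it to the smallest representable positive double.
static double clampPositive(double value) {
    return Math.max(value, Double.MIN_VALUE);
}

// Math.log(0.0) is -Infinity, while Math.log(clampPositive(0.0)) is a large
// negative but finite number (about -744.44).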
From source file: qtiscoringengine.QTIRubric.java

public String getOneCorrectValue(String identifier) {
    if (identifier == null)
        return null;
    ResponseDeclaration rd = getResponseDeclaration(identifier);
    if (rd == null)
        return null;
    DataElement de = rd.getCorrectValue();
    if (de == null)
        return null;
    if (de.getIsContainer()) {
        DEContainer dc = (DEContainer) de;
        if (dc.getMemberCount() > 0) {
            if (rd.getCardinality() == Cardinality.Ordered) {
                return dc.getStringValue();
            }
            double maxScore = Double.MIN_VALUE;
            DataElement elem = null;
            for (int i = 0; i < dc.getMemberCount(); i++) {
                if (i == 0)
                    elem = dc.getMember(i); // this is so we always get a value.
                                            // Setting default value to the first one
                Double d = rd.getScore(dc.getMember(i));
                if (d != null)
                    if (d > maxScore) {
                        maxScore = (double) d;
                        elem = dc.getMember(i);
                    }
            }
            return elem == null ? null : elem.getStringValue();
        } else
            return null;
    }
    return de.getStringValue();
}
From source file: controller.VisLP.java

private static void scopeArea(CCSystem cs, Point2D[] points, boolean origo) {
    // No feasible points. Don't do anything.
    if (points.length == 0)
        return;

    if (points.length == 1)
        origo = true;

    double loX = origo ? 0 : Double.MAX_VALUE;
    double hiX = Double.MIN_VALUE;
    double loY = origo ? 0 : Double.MAX_VALUE;
    double hiY = Double.MIN_VALUE;

    for (Point2D p : points) {
        double x = p.getX();
        double y = p.getY();

        if (x < loX)
            loX = x;
        if (x > hiX)
            hiX = x;
        if (y < loY)
            loY = y;
        if (y > hiY)
            hiY = y;
    }

    if (loX == hiX)
        hiX = loX + 0.001;
    if (loY == hiY)
        hiY = loY + 0.001;

    double distX = hiX - loX;
    double distY = hiY - loY;

    cs.move(loX - distX * 0.1, hiX + distX * 0.1, loY - distY * 0.1, hiY + distY * 0.1);
}
From source file: org.opencommercesearch.feed.BaseRestFeed.java

/**
 * Start running this feed.
 * @throws RepositoryException If there are problems reading the repository items from the database.
 */
public void startFeed() throws RepositoryException, IOException {
    if (!isEnabled()) {
        if (isLoggingInfo()) {
            logInfo("Did not start feed for " + itemDescriptorName
                    + " since is disabled. Verify your configuration is correct.");
        }
        return;
    }

    long startTime = System.currentTimeMillis();
    int processed = 0;
    int failed = 0;

    RepositoryView itemView = getRepository().getView(itemDescriptorName);
    int count = countRql.executeCountQuery(itemView, null);
    int errorThreshold = getErrorThreshold() <= Double.MIN_VALUE ? count
            : (int) Math.ceil((count * getErrorThreshold()));

    if (isLoggingInfo()) {
        logInfo("Started " + itemDescriptorName + " feed for " + count + " items.");
    }

    try {
        long feedTimestamp = System.currentTimeMillis();

        if (count > 0) {
            if (isTransactional()) {
                sendDeleteByQuery();
            }

            Integer[] rqlArgs = new Integer[] { 0, getBatchSize() };
            RepositoryItem[] items = rql.executeQueryUncached(itemView, rqlArgs);

            while (items != null) {
                try {
                    int sent = sendItems(items, feedTimestamp);
                    processed += sent;
                    failed += items.length - sent;
                } catch (Exception ex) {
                    if (isLoggingError()) {
                        logError("Cannot send " + itemDescriptorName + "[" + getIdsFromItemsArray(items) + "]", ex);
                    }

                    // Retry one by one
                    if (isLoggingInfo()) {
                        logInfo("Retrying batch one by one");
                    }

                    for (RepositoryItem item : items) {
                        if (item == null) {
                            continue;
                        }

                        RepositoryItem[] onlyOneItem = new RepositoryItem[] { item };

                        try {
                            int sent = sendItems(onlyOneItem, feedTimestamp);
                            processed += sent;
                            failed += onlyOneItem.length - sent;
                        } catch (Exception ex2) {
                            failed++;
                            if (isLoggingError()) {
                                logError("Cannot resend " + itemDescriptorName + "["
                                        + getIdsFromItemsArray(onlyOneItem) + "]", ex2);
                            }
                        }
                    }
                }

                if (isLoggingInfo()) {
                    logInfo("Processed " + processed + " " + itemDescriptorName + " items out of " + count
                            + " with " + failed + " failures");
                }

                if (failed < errorThreshold) {
                    // Get the next batch only if the feed is performing well.
                    rqlArgs[0] += getBatchSize();
                    items = rql.executeQueryUncached(itemView, rqlArgs);
                } else {
                    // Error threshold reached. Stop.
                    break;
                }
            }

            if (failed < errorThreshold) {
                // Send commit or deletes if the feeds looks healthy.
                if (isTransactional()) {
                    sendCommit();
                } else {
                    sendDelete(feedTimestamp);
                }
            } else {
                if (isLoggingError()) {
                    logError(itemDescriptorName
                            + " feed interrupted since it seems to be failing too often. At least "
                            + (getErrorThreshold() * 100) + "% out of " + count + " items had errors");
                }
                if (isTransactional()) {
                    // Roll back as much as we can from the changes done before the threshold was reached
                    // (specially initial delete)
                    sendRollback();
                }
            }
        } else {
            if (isLoggingInfo()) {
                logInfo("No " + itemDescriptorName + " items found. Nothing to do here.");
            }
        }
    } catch (Exception e) {
        if (isLoggingError()) {
            logError("Error while processing feed.", e);
        }

        if (isTransactional()) {
            sendRollback();
        }
    }

    if (isLoggingInfo()) {
        logInfo(itemDescriptorName + " feed finished in " + ((System.currentTimeMillis() - startTime) / 1000)
                + " seconds, " + processed + " processed items and " + failed + " failures.");
    }
}
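Note: the expression getErrorThreshold() <= Double.MIN_VALUE in the example above treats a threshold that is zero or negative as "not configured", since the only doubles less than or equal to the smallest positive value are zero, negative values, and Double.MIN_VALUE itself. A minimal editorial sketch of this comparison (the threshold value and messages are illustrative):

double threshold = 0.0;

// Zero, negative values, and Double.MIN_VALUE itself are the only doubles that
// satisfy this test; any other positive threshold is greater than Double.MIN_VALUE.
if (threshold <= Double.MIN_VALUE) {
    System.out.println("threshold not configured, using the item count as the limit");
} else {
    System.out.println("threshold configured: " + threshold);
}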
From source file: pipeline.GUI_utils.JXTablePerColumnFiltering.java

public void updateRangeOfColumn(int columnIndex, boolean reinitializeSelection, int boundsToUpdate,
        boolean suppressModelInit) {
    if (!suppressModelInit) {
        needToInitializeFilterModel = true;
        initializeFilterModel();
    }

    boolean isFloat = model.getValueAt(0, columnIndex) instanceof Float;
    boolean isDouble = model.getValueAt(0, columnIndex) instanceof Double;
    boolean isInteger = model.getValueAt(0, columnIndex) instanceof Integer;
    boolean isSpreadsheetCell = model.getValueAt(0, columnIndex) instanceof SpreadsheetCell;

    if (!(isFloat || isInteger || isSpreadsheetCell || isDouble))
        return;

    double min = Double.MAX_VALUE;
    double max = Double.MIN_VALUE;

    double[] valuesForHistogram = new double[model.getRowCount()];

    for (int i = 0; i < model.getRowCount(); i++) {
        double value;
        if (isFloat)
            value = (Float) model.getValueAt(i, columnIndex);
        else if (isDouble)
            value = (Double) model.getValueAt(i, columnIndex);
        else if (isInteger)
            value = (Integer) model.getValueAt(i, columnIndex);
        else {
            value = ((SpreadsheetCell) model.getValueAt(i, columnIndex)).getFloatValue();
        }
        if (Double.isNaN(value))
            value = 0.0d;
        if (value < min)
            min = value;
        if (value > max)
            max = value;
        valuesForHistogram[i] = value;
    }

    // Now compute a histogram; this could be optimized
    HistogramDataset dataset = new HistogramDataset();
    dataset.setType(HistogramType.RELATIVE_FREQUENCY);
    dataset.addSeries("Histogram", valuesForHistogram, 15);

    if (isFloat || isDouble || isSpreadsheetCell) {
        FloatRangeParameter param = (FloatRangeParameter) filteringModel.getValueAt(0, columnIndex);
        param.histogram = dataset;
        float[] currentValue = (float[]) param.getValue();
        if ((boundsToUpdate == BOTH_BOUNDS) || boundsToUpdate == LOWER_BOUND)
            currentValue[2] = (float) min;
        if ((boundsToUpdate == BOTH_BOUNDS) || boundsToUpdate == UPPER_BOUND)
            currentValue[3] = (float) max;
        if (reinitializeSelection) {
            currentValue[0] = currentValue[2];
            currentValue[1] = currentValue[3];
        }
        param.setValueFireIfAppropriate(currentValue, false, true, true);
    } else {
        IntRangeParameter param = (IntRangeParameter) filteringModel.getValueAt(0, columnIndex);
        int[] currentValue = (int[]) param.getValue();
        if ((boundsToUpdate == BOTH_BOUNDS) || boundsToUpdate == LOWER_BOUND)
            currentValue[2] = (int) min;
        if ((boundsToUpdate == BOTH_BOUNDS) || boundsToUpdate == UPPER_BOUND)
            currentValue[3] = (int) max;
        if (reinitializeSelection) {
            currentValue[0] = currentValue[2];
            currentValue[1] = currentValue[3];
        }
        param.setValueFireIfAppropriate(currentValue, false, true, true);
    }
}