List of usage examples for java.lang.Double.compare
public static int compare(double d1, double d2)
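Before the project examples below, a minimal self-contained sketch (not taken from any listed source file) of what the return value means, including the total-order corner cases that differ from the primitive < and == operators:

    public class DoubleCompareDemo {
        public static void main(String[] args) {
            // basic ordering contract: negative, zero, positive
            System.out.println(Double.compare(1.5, 2.5));   // negative
            System.out.println(Double.compare(2.5, 2.5));   // 0
            System.out.println(Double.compare(3.5, 2.5));   // positive

            // total-order corner cases that differ from the primitive operators
            System.out.println(Double.compare(0.0, -0.0));                             // positive: -0.0 sorts before 0.0
            System.out.println(Double.compare(Double.NaN, Double.POSITIVE_INFINITY));  // positive: NaN sorts above everything
            System.out.println(Double.compare(Double.NaN, Double.NaN));                // 0, although NaN != NaN as primitives
        }
    }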
From source file:ml.shifu.shifu.core.dtrain.dt.DTWorker.java
    @Override
    public void init(WorkerContext<DTMasterParams, DTWorkerParams> context) {
        Properties props = context.getProps();
        try {
            SourceType sourceType = SourceType
                    .valueOf(props.getProperty(CommonConstants.MODELSET_SOURCE_TYPE, SourceType.HDFS.toString()));
            this.modelConfig = CommonUtils.loadModelConfig(props.getProperty(CommonConstants.SHIFU_MODEL_CONFIG),
                    sourceType);
            this.columnConfigList = CommonUtils
                    .loadColumnConfigList(props.getProperty(CommonConstants.SHIFU_COLUMN_CONFIG), sourceType);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }

        this.columnCategoryIndexMapping = new HashMap<Integer, Map<String, Integer>>();
        for (ColumnConfig config : this.columnConfigList) {
            if (config.isCategorical()) {
                if (config.getBinCategory() != null) {
                    Map<String, Integer> tmpMap = new HashMap<String, Integer>();
                    for (int i = 0; i < config.getBinCategory().size(); i++) {
                        List<String> catVals = CommonUtils.flattenCatValGrp(config.getBinCategory().get(i));
                        for (String cval : catVals) {
                            tmpMap.put(cval, i);
                        }
                    }
                    this.columnCategoryIndexMapping.put(config.getColumnNum(), tmpMap);
                }
            }
        }

        this.hasCandidates = CommonUtils.hasCandidateColumns(columnConfigList);

        // create Splitter
        String delimiter = context.getProps().getProperty(Constants.SHIFU_OUTPUT_DATA_DELIMITER);
        this.splitter = MapReduceUtils.generateShifuOutputSplitter(delimiter);

        Integer kCrossValidation = this.modelConfig.getTrain().getNumKFold();
        if (kCrossValidation != null && kCrossValidation > 0) {
            isKFoldCV = true;
            LOG.info("Cross validation is enabled by kCrossValidation: {}.", kCrossValidation);
        }

        Double upSampleWeight = modelConfig.getTrain().getUpSampleWeight();
        if (Double.compare(upSampleWeight, 1d) != 0 && (modelConfig.isRegression()
                || (modelConfig.isClassification() && modelConfig.getTrain().isOneVsAll()))) {
            // set mean to upSampleWeight -1 and get sample + 1 to make sure no zero sample value
            LOG.info("Enable up sampling with weight {}.", upSampleWeight);
            this.upSampleRng = new PoissonDistribution(upSampleWeight - 1);
        }

        this.isContinuousEnabled = Boolean.TRUE.toString()
                .equalsIgnoreCase(context.getProps().getProperty(CommonConstants.CONTINUOUS_TRAINING));

        this.workerThreadCount = modelConfig.getTrain().getWorkerThreadCount();
        this.threadPool = Executors.newFixedThreadPool(this.workerThreadCount);
        // enable shut down logic
        context.addCompletionCallBack(new WorkerCompletionCallBack<DTMasterParams, DTWorkerParams>() {
            @Override
            public void callback(WorkerContext<DTMasterParams, DTWorkerParams> context) {
                DTWorker.this.threadPool.shutdownNow();
                try {
                    DTWorker.this.threadPool.awaitTermination(2, TimeUnit.SECONDS);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        });

        this.trainerId = Integer.valueOf(context.getProps().getProperty(CommonConstants.SHIFU_TRAINER_ID, "0"));
        this.isOneVsAll = modelConfig.isClassification() && modelConfig.getTrain().isOneVsAll();

        GridSearch gs = new GridSearch(modelConfig.getTrain().getParams(),
                modelConfig.getTrain().getGridConfigFileContent());
        Map<String, Object> validParams = this.modelConfig.getTrain().getParams();
        if (gs.hasHyperParam()) {
            validParams = gs.getParams(this.trainerId);
            LOG.info("Start grid search worker with params: {}", validParams);
        }

        this.treeNum = Integer.valueOf(validParams.get("TreeNum").toString());

        double memoryFraction = Double.valueOf(context.getProps().getProperty("guagua.data.memoryFraction", "0.6"));
        LOG.info("Max heap memory: {}, fraction: {}", Runtime.getRuntime().maxMemory(), memoryFraction);

        double validationRate = this.modelConfig.getValidSetRate();
        if (StringUtils.isNotBlank(modelConfig.getValidationDataSetRawPath())) {
            // fixed 0.6 and 0.4 of max memory for trainingData and validationData
            this.trainingData = new MemoryLimitedList<Data>(
                    (long) (Runtime.getRuntime().maxMemory() * memoryFraction * 0.6), new ArrayList<Data>());
            this.validationData = new MemoryLimitedList<Data>(
                    (long) (Runtime.getRuntime().maxMemory() * memoryFraction * 0.4), new ArrayList<Data>());
        } else {
            if (Double.compare(validationRate, 0d) != 0) {
                this.trainingData = new MemoryLimitedList<Data>(
                        (long) (Runtime.getRuntime().maxMemory() * memoryFraction * (1 - validationRate)),
                        new ArrayList<Data>());
                this.validationData = new MemoryLimitedList<Data>(
                        (long) (Runtime.getRuntime().maxMemory() * memoryFraction * validationRate),
                        new ArrayList<Data>());
            } else {
                this.trainingData = new MemoryLimitedList<Data>(
                        (long) (Runtime.getRuntime().maxMemory() * memoryFraction), new ArrayList<Data>());
            }
        }

        int[] inputOutputIndex = DTrainUtils.getNumericAndCategoricalInputAndOutputCounts(this.columnConfigList);
        // numerical + categorical = # of all input
        this.inputCount = inputOutputIndex[0] + inputOutputIndex[1];
        // regression outputNodeCount is 1, binaryClassfication, it is 1, OneVsAll it is 1, Native classification it is
        // 1, with index of 0,1,2,3 denotes different classes
        this.isAfterVarSelect = (inputOutputIndex[3] == 1);
        this.isManualValidation = (modelConfig.getValidationDataSetRawPath() != null
                && !"".equals(modelConfig.getValidationDataSetRawPath()));

        int numClasses = this.modelConfig.isClassification() ? this.modelConfig.getTags().size() : 2;
        String imStr = validParams.get("Impurity").toString();
        int minInstancesPerNode = Integer.valueOf(validParams.get("MinInstancesPerNode").toString());
        double minInfoGain = Double.valueOf(validParams.get("MinInfoGain").toString());
        if (imStr.equalsIgnoreCase("entropy")) {
            impurity = new Entropy(numClasses, minInstancesPerNode, minInfoGain);
        } else if (imStr.equalsIgnoreCase("gini")) {
            impurity = new Gini(numClasses, minInstancesPerNode, minInfoGain);
        } else if (imStr.equalsIgnoreCase("friedmanmse")) {
            impurity = new FriedmanMSE(minInstancesPerNode, minInfoGain);
        } else {
            impurity = new Variance(minInstancesPerNode, minInfoGain);
        }

        this.isRF = ALGORITHM.RF.toString().equalsIgnoreCase(modelConfig.getAlgorithm());
        this.isGBDT = ALGORITHM.GBT.toString().equalsIgnoreCase(modelConfig.getAlgorithm());

        String lossStr = validParams.get("Loss").toString();
        if (lossStr.equalsIgnoreCase("log")) {
            this.loss = new LogLoss();
        } else if (lossStr.equalsIgnoreCase("absolute")) {
            this.loss = new AbsoluteLoss();
        } else if (lossStr.equalsIgnoreCase("halfgradsquared")) {
            this.loss = new HalfGradSquaredLoss();
        } else if (lossStr.equalsIgnoreCase("squared")) {
            this.loss = new SquaredLoss();
        } else {
            try {
                this.loss = (Loss) ClassUtils.newInstance(Class.forName(lossStr));
            } catch (ClassNotFoundException e) {
                LOG.warn("Class not found for {}, using default SquaredLoss", lossStr);
                this.loss = new SquaredLoss();
            }
        }

        if (this.isGBDT) {
            this.learningRate = Double.valueOf(validParams.get(CommonConstants.LEARNING_RATE).toString());
            Object swrObj = validParams.get("GBTSampleWithReplacement");
            if (swrObj != null) {
                this.gbdtSampleWithReplacement = Boolean.TRUE.toString().equalsIgnoreCase(swrObj.toString());
            }
            Object dropoutObj = validParams.get(CommonConstants.DROPOUT_RATE);
            if (dropoutObj != null) {
                this.dropOutRate = Double.valueOf(dropoutObj.toString());
            }
        }

        this.isStratifiedSampling = this.modelConfig.getTrain().getStratifiedSample();

        this.checkpointOutput = new Path(context.getProps()
                .getProperty(CommonConstants.SHIFU_DT_MASTER_CHECKPOINT_FOLDER, "tmp/cp_" + context.getAppId()));

        LOG.info(
                "Worker init params:isAfterVarSel={}, treeNum={}, impurity={}, loss={}, learningRate={}, gbdtSampleWithReplacement={}, isRF={}, isGBDT={}, isStratifiedSampling={}, isKFoldCV={}, kCrossValidation={}, dropOutRate={}",
                isAfterVarSelect, treeNum, impurity.getClass().getName(), loss.getClass().getName(),
                this.learningRate, this.gbdtSampleWithReplacement, this.isRF, this.isGBDT,
                this.isStratifiedSampling, this.isKFoldCV, kCrossValidation, this.dropOutRate);

        // for fail over, load existing trees
        if (!context.isFirstIteration()) {
            if (this.isGBDT) {
                // set flag here and recover later in doComputing, this is to make sure recover after load part which
                // can load latest trees in #doCompute
                isNeedRecoverGBDTPredict = true;
            } else {
                // RF , trees are recovered from last master results
                recoverTrees = context.getLastMasterResult().getTrees();
            }
        }

        if (context.isFirstIteration() && this.isContinuousEnabled && this.isGBDT) {
            Path modelPath = new Path(context.getProps().getProperty(CommonConstants.GUAGUA_OUTPUT));
            TreeModel existingModel = null;
            try {
                existingModel = (TreeModel) ModelSpecLoaderUtils.loadModel(modelConfig, modelPath,
                        ShifuFileUtils.getFileSystemBySourceType(this.modelConfig.getDataSet().getSource()));
            } catch (IOException e) {
                LOG.error("Error in get existing model, will ignore and start from scratch", e);
            }
            if (existingModel == null) {
                LOG.warn("No model is found even set to continuous model training.");
                return;
            } else {
                recoverTrees = existingModel.getTrees();
                LOG.info("Loading existing {} trees", recoverTrees.size());
            }
        }
    }
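As an aside on this example: Double.compare(value, constant) != 0 is used twice above as an exact "is this the default?" test. A minimal sketch isolating that pattern; the helper name isUpSamplingEnabled is hypothetical and not part of Shifu:

    class UpSampleCheckSketch {
        // Hypothetical helper isolating the exact-equality pattern used above.
        static boolean isUpSamplingEnabled(double upSampleWeight) {
            // True only when the configured weight is not exactly the default 1.0;
            // Double.compare uses the same total order as sorting, so the check is
            // well defined even for NaN (which counts as "not 1.0").
            return Double.compare(upSampleWeight, 1d) != 0;
        }
    }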
From source file:org.openecomp.sdc.be.model.operations.impl.ComponentOperation.java
    public Either<ComponentMetadataData, StorageOperationStatus> getLatestComponentMetadataByUuid(
            NodeTypeEnum nodeType, String uuid) {
        Either<ComponentMetadataData, StorageOperationStatus> getComponentResult = null;
        List<ComponentMetadataData> latestVersionList = null;
        ComponentMetadataData latestVersion = null;

        Map<String, Object> propertiesToMatch = new HashMap<String, Object>();
        propertiesToMatch.put(GraphPropertiesDictionary.UUID.getProperty(), uuid);
        propertiesToMatch.put(GraphPropertiesDictionary.IS_HIGHEST_VERSION.getProperty(), true);

        Either<List<ComponentMetadataData>, TitanOperationStatus> getComponentEither = titanGenericDao
                .getByCriteria(nodeType, propertiesToMatch, ComponentMetadataData.class);
        if (getComponentEither.isRight()) {
            log.debug("Couldn't fetch metadata for component with type {} and uuid {}, error: {}", nodeType, uuid,
                    getComponentEither.right().value());
            getComponentResult = Either.right(
                    DaoStatusConverter.convertTitanStatusToStorageStatus(getComponentEither.right().value()));
        }
        if (getComponentResult == null) {
            latestVersionList = getComponentEither.left().value();
            if (latestVersionList.isEmpty()) {
                log.debug("Component with type {} and uuid {} was not found", nodeType, uuid);
                getComponentResult = Either.right(StorageOperationStatus.NOT_FOUND);
            }
        }
        if (getComponentResult == null) {
            latestVersion = latestVersionList.size() == 1 ? latestVersionList.get(0)
                    : latestVersionList.stream()
                            .max((c1, c2) -> Double.compare(
                                    Double.parseDouble(c1.getMetadataDataDefinition().getVersion()),
                                    Double.parseDouble(c2.getMetadataDataDefinition().getVersion())))
                            .get();
            getComponentResult = Either.left(latestVersion);
        }
        return getComponentResult;
    }
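A hedged sketch of the same selection step, assuming Java 16+ records and a simplified Versioned stand-in for ComponentMetadataData: Comparator.comparingDouble builds the same Double.compare-based ordering without a hand-written lambda.

    import java.util.Comparator;
    import java.util.List;
    import java.util.Optional;

    // Hypothetical stand-in for ComponentMetadataData, reduced to its version string.
    record Versioned(String version) {}

    class LatestVersionSketch {
        // Same idea as the stream().max(...) call above: the comparator built by
        // Comparator.comparingDouble compares the parsed versions with Double.compare.
        static Optional<Versioned> latest(List<Versioned> candidates) {
            return candidates.stream()
                    .max(Comparator.comparingDouble(v -> Double.parseDouble(v.version())));
        }
    }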
From source file:com.wwidesigner.optimization.ObjectiveFunctionOptimizer.java
    /**
     * Sort PointValuePairs from best to worst, followed by {@code null} elements.
     *
     * @param goal
     *            - GoalType.MINIMIZE or GoalType.MAXIMIZE
     * @param optima
     *            - array of point-value pairs from successive optimizations.
     */
    protected static void sortPairs(final GoalType goal, PointValuePair[] optima) {
        Arrays.sort(optima, new Comparator<PointValuePair>() {
            public int compare(final PointValuePair o1, final PointValuePair o2) {
                if (o1 == null) {
                    return (o2 == null) ? 0 : 1;
                } else if (o2 == null) {
                    return -1;
                }
                final double v1 = o1.getValue();
                final double v2 = o2.getValue();
                return (goal == GoalType.MINIMIZE) ? Double.compare(v1, v2) : Double.compare(v2, v1);
            }
        });
    }
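An equivalent formulation as a sketch, assuming the Commons Math 3 optim package for PointValuePair and GoalType: comparator combinators reproduce the "best value first, nulls last" ordering, since Comparator.comparingDouble delegates to Double.compare internally.

    import java.util.Arrays;
    import java.util.Comparator;
    import org.apache.commons.math3.optim.PointValuePair;
    import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;

    class SortPairsSketch {
        // Best value first according to the goal, null elements pushed to the end.
        static void sortPairs(GoalType goal, PointValuePair[] optima) {
            Comparator<PointValuePair> byValue = Comparator.comparingDouble(PointValuePair::getValue);
            if (goal == GoalType.MAXIMIZE) {
                byValue = byValue.reversed();
            }
            Arrays.sort(optima, Comparator.nullsLast(byValue));
        }
    }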
From source file:net.sf.maltcms.chromaui.msviewer.ui.panel.MassSpectrumPanel.java
    private TopKItemsLabelGenerator createTopKItemsLabelGenerator(int topK, int series) {
        List<Point> seriesItemList = new ArrayList<>();
        Comparator<Point> c = new Comparator<Point>() {
            @Override
            public int compare(Point o1, Point o2) {
                double v1 = sc.getYValue(o1.x, o1.y);
                double v2 = sc.getYValue(o2.x, o2.y);
                return Double.compare(Math.abs(v1), Math.abs(v2));
            }
        };
        for (int i = 0; i < sc.getItemCount(series); i++) {
            double intens = sc.getYValue(series, i);
            if (Math.abs(intens) > 0) {
                seriesItemList.add(new Point(series, i));
            }
        }
        Collections.sort(seriesItemList, c);
        return new TopKItemsLabelGenerator(seriesItemList, topK);
    }
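The anonymous comparator above can also be written with Comparator.comparingDouble. A sketch, assuming a JFreeChart XYDataset in place of the field sc:

    import java.awt.Point;
    import java.util.Comparator;
    import org.jfree.data.xy.XYDataset;

    class AbsIntensityComparatorSketch {
        // Ascending absolute intensity, as in the anonymous comparator above;
        // 'dataset' plays the role of the field 'sc'.
        static Comparator<Point> byAbsoluteIntensity(XYDataset dataset) {
            return Comparator.comparingDouble(p -> Math.abs(dataset.getYValue(p.x, p.y)));
        }
    }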
From source file:org.grouplens.lenskit.vectors.SparseVector.java
    /**
     * Get the keys of this vector sorted by the value of the items
     * stored for each key.
     *
     * @param decreasing If {@code true}, sort in decreasing order.
     * @return The sorted list of keys of this vector.
     */
    public LongArrayList keysByValue(boolean decreasing) {
        long[] skeys = keySet().toLongArray();

        LongComparator cmp;
        // Set up the comparator. We use the key as a secondary comparison to get
        // a reproducible sort irrespective of sorting algorithm.
        if (decreasing) {
            cmp = new AbstractLongComparator() {
                @Override
                public int compare(long k1, long k2) {
                    int c = Double.compare(get(k2), get(k1));
                    if (c != 0) {
                        return c;
                    } else {
                        return Longs.compare(k1, k2);
                    }
                }
            };
        } else {
            cmp = new AbstractLongComparator() {
                @Override
                public int compare(long k1, long k2) {
                    int c = Double.compare(get(k1), get(k2));
                    if (c != 0) {
                        return c;
                    } else {
                        return Longs.compare(k1, k2);
                    }
                }
            };
        }

        LongArrays.quickSort(skeys, cmp);
        return LongArrayList.wrap(skeys);
    }
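The value-then-key idea generalizes beyond fastutil's primitive comparators. A standalone sketch for an ordinary Map<Long, Double>; the names are illustrative and not from LensKit:

    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.Map;

    class KeysByValueSketch {
        // Order keys by their value, breaking ties on the key itself so the result
        // is reproducible regardless of the sorting algorithm used.
        static Long[] keysByValue(Map<Long, Double> vector, boolean decreasing) {
            Comparator<Long> byValue = Comparator.comparingDouble(vector::get);
            if (decreasing) {
                byValue = byValue.reversed();
            }
            Long[] keys = vector.keySet().toArray(new Long[0]);
            Arrays.sort(keys, byValue.thenComparing(Comparator.naturalOrder()));
            return keys;
        }
    }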
From source file:com.joptimizer.optimizers.LPPresolverNetlibTest.java
    /**
     * Tests the presolving of a netlib problem.
     * If checkExpectedSolution, the presolving is checked step by step against
     * a (beforehand) known solution of the problem.
     * NOTE: this known solution might differ from the solution given by the presolver
     * (e.g. in the presence of a weakly dominated column @see {@link LPPresolver#removeDominatedColumns},
     * or if it is calculated with simplex method
     * or if bounds are not exactly the same), so sometimes it is not a good test.
     */
    public void doTesting(String problemName, boolean checkExpectedSolution, double myExpectedTolerance)
            throws Exception {
        log.debug("doTesting: " + problemName);

        int s = (int) Utils.loadDoubleArrayFromFile("lp" + File.separator + "netlib" + File.separator
                + problemName + File.separator + "standardS.txt")[0];
        double[] c = Utils.loadDoubleArrayFromFile(
                "lp" + File.separator + "netlib" + File.separator + problemName + File.separator + "standardC.txt");
        double[][] A = Utils.loadDoubleMatrixFromFile(
                "lp" + File.separator + "netlib" + File.separator + problemName + File.separator + "standardA.csv",
                ",".charAt(0));
        double[] b = Utils.loadDoubleArrayFromFile(
                "lp" + File.separator + "netlib" + File.separator + problemName + File.separator + "standardB.txt");
        double[] lb = Utils.loadDoubleArrayFromFile("lp" + File.separator + "netlib" + File.separator + problemName
                + File.separator + "standardLB.txt");
        double[] ub = Utils.loadDoubleArrayFromFile("lp" + File.separator + "netlib" + File.separator + problemName
                + File.separator + "standardUB.txt");
        double[] expectedSolution = Utils.loadDoubleArrayFromFile("lp" + File.separator + "netlib" + File.separator
                + problemName + File.separator + "standardSolution.txt");
        double expectedValue = Utils.loadDoubleArrayFromFile("lp" + File.separator + "netlib" + File.separator
                + problemName + File.separator + "standardValue.txt")[0];
        double expectedTolerance = Utils.loadDoubleArrayFromFile("lp" + File.separator + "netlib" + File.separator
                + problemName + File.separator + "standardTolerance.txt")[0];

        //in order to compare with tha Math results, we must have the same bounds
        for (int i = 0; i < lb.length; i++) {
            if (Double.isNaN(lb[i])) {
                lb[i] = -9999999d; //the same as the notebook value
            }
        }
        for (int i = 0; i < ub.length; i++) {
            if (Double.isNaN(ub[i])) {
                ub[i] = +9999999d; //the same as the notebook value
            }
        }

        RealMatrix AMatrix = MatrixUtils.createRealMatrix(A);
        RealVector bVector = MatrixUtils.createRealVector(b);
        //expectedTolerance = Math.max(expectedTolerance, AMatrix.operate(MatrixUtils.createRealVector(expectedSolution)).subtract(bVector).getNorm());
        expectedTolerance = Math.max(1.e-9, expectedTolerance);

        // must be: A pXn with rank(A)=p < n
        QRSparseFactorization qr = new QRSparseFactorization(new SparseDoubleMatrix2D(A));
        qr.factorize();
        log.debug("p : " + AMatrix.getRowDimension());
        log.debug("n : " + AMatrix.getColumnDimension());
        log.debug("full rank: " + qr.hasFullRank());

        LPPresolver lpPresolver = new LPPresolver();
        lpPresolver.setNOfSlackVariables((short) s);
        if (checkExpectedSolution) {
            lpPresolver.setExpectedSolution(expectedSolution);// this is just for test!
            lpPresolver.setExpectedTolerance(myExpectedTolerance);// this is just for test!
        }
        // lpPresolver.setAvoidFillIn(true);
        // lpPresolver.setZeroTolerance(1.e-13);
        lpPresolver.presolve(c, A, b, lb, ub);
        int n = lpPresolver.getPresolvedN();
        DoubleMatrix1D presolvedC = lpPresolver.getPresolvedC();
        DoubleMatrix2D presolvedA = lpPresolver.getPresolvedA();
        DoubleMatrix1D presolvedB = lpPresolver.getPresolvedB();
        DoubleMatrix1D presolvedLb = lpPresolver.getPresolvedLB();
        DoubleMatrix1D presolvedUb = lpPresolver.getPresolvedUB();
        DoubleMatrix1D presolvedYlb = lpPresolver.getPresolvedYlb();
        DoubleMatrix1D presolvedYub = lpPresolver.getPresolvedYub();
        DoubleMatrix1D presolvedZlb = lpPresolver.getPresolvedZlb();
        DoubleMatrix1D presolvedZub = lpPresolver.getPresolvedZub();
        log.debug("n : " + n);
        if (log.isDebugEnabled() && n > 0) {
            log.debug("presolvedC : " + ArrayUtils.toString(presolvedC.toArray()));
            log.debug("presolvedA : " + ArrayUtils.toString(presolvedA.toArray()));
            log.debug("presolvedB : " + ArrayUtils.toString(presolvedB.toArray()));
            log.debug("presolvedLb : " + ArrayUtils.toString(presolvedLb.toArray()));
            log.debug("presolvedUb : " + ArrayUtils.toString(presolvedUb.toArray()));
            log.debug("presolvedYlb: " + ArrayUtils.toString(presolvedYlb.toArray()));
            log.debug("presolvedYub: " + ArrayUtils.toString(presolvedYub.toArray()));
            log.debug("presolvedZlb: " + ArrayUtils.toString(presolvedZlb.toArray()));
            log.debug("presolvedZub: " + ArrayUtils.toString(presolvedZub.toArray()));
        }

        if (n == 0) {
            // deterministic problem
            double[] sol = lpPresolver.postsolve(new double[] {});
            assertEquals(expectedSolution.length, sol.length);
            for (int i = 0; i < sol.length; i++) {
                // log.debug("i: " + i);
                assertEquals(expectedSolution[i], sol[i], 1.e-9);
            }
        } else {
            Utils.writeDoubleArrayToFile(presolvedC.toArray(),
                    "target" + File.separator + "presolvedC_" + problemName + ".txt");
            Utils.writeDoubleMatrixToFile(presolvedA.toArray(),
                    "target" + File.separator + "presolvedA_" + problemName + ".csv");
            Utils.writeDoubleArrayToFile(presolvedB.toArray(),
                    "target" + File.separator + "presolvedB_" + problemName + ".txt");
            Utils.writeDoubleArrayToFile(presolvedLb.toArray(),
                    "target" + File.separator + "presolvedLB_" + problemName + ".txt");
            Utils.writeDoubleArrayToFile(presolvedUb.toArray(),
                    "target" + File.separator + "presolvedUB_" + problemName + ".txt");

            // check objective function
            double delta = expectedTolerance;
            RealVector presolvedES = MatrixUtils.createRealVector(lpPresolver.presolve(expectedSolution));
            double presolvedEV = MatrixUtils.createRealVector(presolvedC.toArray()).dotProduct(presolvedES);// in general it is different from the optimal value
            log.debug("presolved expected value: " + presolvedEV);
            RealVector postsolvedES = MatrixUtils.createRealVector(lpPresolver.postsolve(presolvedES.toArray()));
            double postsolvedEV = MatrixUtils.createRealVector(c).dotProduct(postsolvedES);
            //assertEquals(expectedValue, postsolvedEV, delta);
            assertTrue(Math.abs((expectedValue - postsolvedEV) / expectedValue) < delta);

            // check postsolved constraints
            for (int i = 0; i < lb.length; i++) {
                double di = Double.isNaN(lb[i]) ? -Double.MAX_VALUE : lb[i];
                assertTrue(di <= postsolvedES.getEntry(i) + delta);
            }
            for (int i = 0; i < ub.length; i++) {
                double di = Double.isNaN(ub[i]) ? Double.MAX_VALUE : ub[i];
                assertTrue(di + delta >= postsolvedES.getEntry(i));
            }
            RealVector Axmb = AMatrix.operate(postsolvedES).subtract(bVector);
            assertEquals(0., Axmb.getNorm(), 1.5 * expectedTolerance);

            // check presolved constraints
            assertEquals(presolvedLb.size(), presolvedES.getDimension());
            assertEquals(presolvedUb.size(), presolvedES.getDimension());
            AMatrix = MatrixUtils.createRealMatrix(presolvedA.toArray());//reassigned to avoid memory consumption
            bVector = MatrixUtils.createRealVector(presolvedB.toArray());
            for (int i = 0; i < presolvedLb.size(); i++) {
                double di = Double.isNaN(presolvedLb.getQuick(i)) ? -Double.MAX_VALUE : presolvedLb.getQuick(i);
                assertTrue(di <= presolvedES.getEntry(i) + delta);
            }
            for (int i = 0; i < presolvedUb.size(); i++) {
                double di = Double.isNaN(presolvedUb.getQuick(i)) ? Double.MAX_VALUE : presolvedUb.getQuick(i);
                assertTrue(di + delta >= presolvedES.getEntry(i));
            }
            Axmb = AMatrix.operate(presolvedES).subtract(bVector);
            assertEquals(0., Axmb.getNorm(), 1.5 * expectedTolerance);

            //check for 0-rows
            List<Integer> zeroRows = new ArrayList<Integer>();
            for (int i = 0; i < presolvedA.rows(); i++) {
                boolean isNotZero = false;
                for (int j = 0; !isNotZero && j < presolvedA.columns(); j++) {
                    isNotZero = Double.compare(0., presolvedA.getQuick(i, j)) != 0;
                }
                if (!isNotZero) {
                    zeroRows.add(zeroRows.size(), i);
                }
            }
            if (!zeroRows.isEmpty()) {
                log.debug("All 0 entries in rows " + ArrayUtils.toString(zeroRows));
                fail();
            }

            //check for 0-columns
            List<Integer> zeroCols = new ArrayList<Integer>();
            for (int j = 0; j < presolvedA.columns(); j++) {
                boolean isNotZero = false;
                for (int i = 0; !isNotZero && i < presolvedA.rows(); i++) {
                    isNotZero = Double.compare(0., presolvedA.getQuick(i, j)) != 0;
                }
                if (!isNotZero) {
                    zeroCols.add(zeroCols.size(), j);
                }
            }
            if (!zeroCols.isEmpty()) {
                log.debug("All 0 entries in columns " + ArrayUtils.toString(zeroCols));
                fail();
            }

            // check rank(A): must be A pXn with rank(A)=p < n
            qr = new QRSparseFactorization(new SparseDoubleMatrix2D(presolvedA.toArray()));
            qr.factorize();
            boolean isFullRank = qr.hasFullRank();
            log.debug("p : " + AMatrix.getRowDimension());
            log.debug("n : " + AMatrix.getColumnDimension());
            log.debug("full rank: " + isFullRank);
            assertTrue(AMatrix.getRowDimension() < AMatrix.getColumnDimension());
            assertTrue(isFullRank);
        }
    }
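The 0-row scan above relies on Double.compare(0., x) != 0 as an exact zero test. A standalone sketch of the same scan for a plain double[][], not part of JOptimizer's test code:

    import java.util.ArrayList;
    import java.util.List;

    class ZeroRowCheckSketch {
        // Standalone version of the 0-row scan above for a plain double[][].
        // Double.compare(0., x) != 0 is an exact test: -0.0 and NaN both count as
        // "not equal to 0.0" under the total order.
        static List<Integer> findZeroRows(double[][] a) {
            List<Integer> zeroRows = new ArrayList<>();
            for (int i = 0; i < a.length; i++) {
                boolean isNotZero = false;
                for (int j = 0; !isNotZero && j < a[i].length; j++) {
                    isNotZero = Double.compare(0., a[i][j]) != 0;
                }
                if (!isNotZero) {
                    zeroRows.add(i);
                }
            }
            return zeroRows;
        }
    }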
From source file:ca.mcgill.cs.creco.logic.ScoredAttribute.java
    private void sortEntries(List<Map.Entry<String, Double>> pEntries) {
        Collections.sort(pEntries, new Comparator<Map.Entry<String, Double>>() {
            @Override
            public int compare(Map.Entry<String, Double> pA, Map.Entry<String, Double> pB) {
                return -Double.compare(pA.getValue(), pB.getValue());
            }
        });
    }
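An equivalent sketch using Map.Entry.comparingByValue with a reversed comparator, which avoids negating Double.compare by hand:

    import java.util.Comparator;
    import java.util.List;
    import java.util.Map;

    class SortEntriesSketch {
        // Same descending-by-value sort as above, expressed with comparator combinators.
        static void sortEntries(List<Map.Entry<String, Double>> entries) {
            entries.sort(Map.Entry.comparingByValue(Comparator.reverseOrder()));
        }
    }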
From source file:enumj.EnumeratorTest.java
    @Test
    public void testAllMatch() {
        System.out.println("allMatch");
        EnumeratorGenerator.generators().limit(100).map(g -> g.enumerator()).map(e -> e.map(x -> x * x))
                .forEach(e -> {
                    assertTrue(e.allMatch(x -> x >= 0));
                });
        final Random rnd = new Random(100);
        EnumeratorGenerator.generators().limit(100).map(g -> g.enumerator())
                .map(e -> e.append(-1.0, -0.5, 0.0, 0.25, 0.75))
                .map(e -> e.map((Double d) -> Pair.of(d, rnd.nextDouble()))
                        .sorted((p1, p2) -> Double.compare(p1.getRight(), p2.getRight())).map(p -> p.getLeft()))
                .map(e -> e.map(x -> x * x)).forEach(e -> {
                    assertFalse(e.allMatch(x -> x > 1));
                });
    }
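The sorted(...) call above shuffles elements by pairing them with random keys and comparing the keys with Double.compare. A standalone sketch of that trick on a plain java.util.stream.Stream, assuming Commons Lang's Pair; the method name shuffled is illustrative:

    import java.util.List;
    import java.util.Random;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;
    import org.apache.commons.lang3.tuple.Pair;

    class ShuffleByRandomKeySketch {
        // Pair each element with a random double, sort the pairs by the random key
        // with Double.compare, then drop the key again.
        static List<Double> shuffled(Stream<Double> values, Random rnd) {
            return values.map(d -> Pair.of(d, rnd.nextDouble()))
                    .sorted((p1, p2) -> Double.compare(p1.getRight(), p2.getRight()))
                    .map(Pair::getLeft)
                    .collect(Collectors.toList());
        }
    }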
From source file:knowledgeMiner.mining.SentenceParserHeuristic.java
    /**
     * Locates the anchors in a string and forms a replacement map for them.
     *
     * @param sentence
     *            The sentence to search for anchors.
     * @param anchorWeights
     *            An optional map to record any weight information for the
     *            anchors. If no weights in the text, all weights are assumed to
     *            be 1.0.
     * @return A SortedMap of anchors, ordered in largest text size to smallest.
     */
    public static SortedMap<String, String> locateAnchors(String sentence, Map<String, Double> anchorWeights) {
        SortedMap<String, String> anchorMap = new TreeMap<>(new Comparator<String>() {
            @Override
            public int compare(String o1, String o2) {
                int result = Double.compare(o1.length(), o2.length());
                if (result != 0)
                    return -result;
                return o1.compareTo(o2);
            }
        });
        Matcher m = WikiParser.ANCHOR_PARSER.matcher(sentence);
        while (m.find()) {
            String replString = (m.group(2) != null) ? m.group(2) : m.group(1);
            anchorMap.put(replString, m.group());
        }
        return anchorMap;
    }
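Since String.length() returns an int, Integer.compare (or Comparator.comparingInt) is sufficient here; Double.compare also works because int lengths convert exactly to double. A sketch of the same "longest text first, then alphabetical" ordering; the helper name is illustrative:

    import java.util.Comparator;
    import java.util.SortedMap;
    import java.util.TreeMap;

    class AnchorOrderSketch {
        // Longest string first, ties broken alphabetically, as in the TreeMap above.
        static SortedMap<String, String> newAnchorMap() {
            Comparator<String> longestFirst = Comparator.comparingInt(String::length).reversed()
                    .thenComparing(Comparator.naturalOrder());
            return new TreeMap<>(longestFirst);
        }
    }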
From source file:com.joptimizer.optimizers.PrimalDualMethod.java
    @Override
    public int optimize() throws Exception {
        Log.i(MainActivity.JOPTIMIZER_LOGTAG, "optimize");
        long tStart = System.currentTimeMillis();
        OptimizationResponse response = new OptimizationResponse();

        // @TODO: check assumptions!!!
        // if(getA()!=null){
        //     if(ALG.rank(getA())>=getA().rows()){
        //         throw new IllegalArgumentException("A-rank must be less than A-rows");
        //     }
        // }

        DoubleMatrix1D X0 = getInitialPoint();
        if (X0 == null) {
            DoubleMatrix1D X0NF = getNotFeasibleInitialPoint();
            if (X0NF != null) {
                double rPriX0NFNorm = Math.sqrt(ALG.norm2(rPri(X0NF)));
                DoubleMatrix1D fiX0NF = getFi(X0NF);
                int maxIndex = Utils.getMaxIndex(fiX0NF.toArray());
                double maxValue = fiX0NF.get(maxIndex);
                if (Log.isLoggable(MainActivity.JOPTIMIZER_LOGTAG, Log.DEBUG)) {
                    Log.d(MainActivity.JOPTIMIZER_LOGTAG, "rPriX0NFNorm : " + rPriX0NFNorm);
                    Log.d(MainActivity.JOPTIMIZER_LOGTAG, "X0NF : " + ArrayUtils.toString(X0NF.toArray()));
                    Log.d(MainActivity.JOPTIMIZER_LOGTAG, "fiX0NF : " + ArrayUtils.toString(fiX0NF.toArray()));
                }
                if (maxValue < 0 && rPriX0NFNorm <= getToleranceFeas()) {
                    //the provided not-feasible starting point is already feasible
                    Log.d(MainActivity.JOPTIMIZER_LOGTAG, "the provided initial point is already feasible");
                    X0 = X0NF;
                }
            }
            if (X0 == null) {
                BasicPhaseIPDM bf1 = new BasicPhaseIPDM(this);
                X0 = bf1.findFeasibleInitialPoint();
            }
        }

        //check X0 feasibility
        DoubleMatrix1D fiX0 = getFi(X0);
        int maxIndex = Utils.getMaxIndex(fiX0.toArray());
        double maxValue = fiX0.get(maxIndex);
        double rPriX0Norm = Math.sqrt(ALG.norm2(rPri(X0)));
        if (maxValue >= 0 || rPriX0Norm > getToleranceFeas()) {
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "rPriX0Norm : " + rPriX0Norm);
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "ineqX0 : " + ArrayUtils.toString(fiX0.toArray()));
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "max ineq index: " + maxIndex);
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "max ineq value: " + maxValue);
            throw new Exception("initial point must be strictly feasible");
        }

        DoubleMatrix1D V0 = (getA() != null) ? F1.make(getA().rows()) : F1.make(0);

        DoubleMatrix1D L0 = getInitialLagrangian();
        if (L0 != null) {
            for (int j = 0; j < L0.size(); j++) {
                // must be >0
                if (L0.get(j) <= 0) {
                    throw new IllegalArgumentException("initial lagrangian must be strictly > 0");
                }
            }
        } else {
            //L0 = F1.make(getFi().length, 1.);// must be >0
            L0 = F1.make(getFi().length, Math.min(1, (double) getDim() / getFi().length));// must be >0
        }

        if (Log.isLoggable(MainActivity.JOPTIMIZER_LOGTAG, Log.DEBUG)) {
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "X0: " + ArrayUtils.toString(X0.toArray()));
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "V0: " + ArrayUtils.toString(V0.toArray()));
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "L0: " + ArrayUtils.toString(L0.toArray()));
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "toleranceFeas: " + getToleranceFeas());
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "tolerance : " + getTolerance());
        }

        DoubleMatrix1D X = X0;
        DoubleMatrix1D V = V0;
        DoubleMatrix1D L = L0;
        //double F0X;
        //DoubleMatrix1D gradF0X = null;
        //DoubleMatrix1D fiX = null;
        //DoubleMatrix2D GradFiX = null;
        //DoubleMatrix1D rPriX = null;
        //DoubleMatrix1D rCentXLt = null;
        //DoubleMatrix1D rDualXLV = null;
        //double rPriXNorm = Double.NaN;
        //double rCentXLtNorm = Double.NaN;
        //double rDualXLVNorm = Double.NaN;
        //double normRXLVt = Double.NaN;
        double previousF0X = Double.NaN;
        double previousRPriXNorm = Double.NaN;
        double previousRDualXLVNorm = Double.NaN;
        double previousSurrDG = Double.NaN;
        double t;
        int iteration = 0;
        while (true) {
            iteration++;
            // iteration limit condition
            if (iteration == getMaxIteration() + 1) {
                response.setReturnCode(OptimizationResponse.WARN);
                Log.w(MainActivity.JOPTIMIZER_LOGTAG, "Max iterations limit reached");
                break;
            }

            double F0X = getF0(X);
            if (Log.isLoggable(MainActivity.JOPTIMIZER_LOGTAG, Log.DEBUG)) {
                Log.d(MainActivity.JOPTIMIZER_LOGTAG, "iteration: " + iteration);
                Log.d(MainActivity.JOPTIMIZER_LOGTAG, "X=" + ArrayUtils.toString(X.toArray()));
                Log.d(MainActivity.JOPTIMIZER_LOGTAG, "L=" + ArrayUtils.toString(L.toArray()));
                Log.d(MainActivity.JOPTIMIZER_LOGTAG, "V=" + ArrayUtils.toString(V.toArray()));
                Log.d(MainActivity.JOPTIMIZER_LOGTAG, "f0(X)=" + F0X);
            }

            // if(!Double.isNaN(previousF0X)){
            //     if (previousF0X < F0X) {
            //         throw new Exception("critical minimization problem");
            //     }
            // }
            // previousF0X = F0X;

            // determine functions evaluations
            DoubleMatrix1D gradF0X = getGradF0(X);
            DoubleMatrix1D fiX = getFi(X);
            DoubleMatrix2D GradFiX = getGradFi(X);
            DoubleMatrix2D[] HessFiX = getHessFi(X);

            // determine t
            double surrDG = getSurrogateDualityGap(fiX, L);
            t = getMu() * getFi().length / surrDG;
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "t: " + t);

            // determine residuals
            DoubleMatrix1D rPriX = rPri(X);
            DoubleMatrix1D rCentXLt = rCent(fiX, L, t);
            DoubleMatrix1D rDualXLV = rDual(GradFiX, gradF0X, L, V);
            double rPriXNorm = Math.sqrt(ALG.norm2(rPriX));
            double rCentXLtNorm = Math.sqrt(ALG.norm2(rCentXLt));
            double rDualXLVNorm = Math.sqrt(ALG.norm2(rDualXLV));
            double normRXLVt = Math
                    .sqrt(Math.pow(rPriXNorm, 2) + Math.pow(rCentXLtNorm, 2) + Math.pow(rDualXLVNorm, 2));
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "rPri norm: " + rPriXNorm);
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "rCent norm: " + rCentXLtNorm);
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "rDual norm: " + rDualXLVNorm);
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "surrDG : " + surrDG);

            // custom exit condition
            if (checkCustomExitConditions(X)) {
                response.setReturnCode(OptimizationResponse.SUCCESS);
                break;
            }

            // exit condition
            if (rPriXNorm <= getToleranceFeas() && rDualXLVNorm <= getToleranceFeas()
                    && surrDG <= getTolerance()) {
                response.setReturnCode(OptimizationResponse.SUCCESS);
                break;
            }

            // progress conditions
            if (isCheckProgressConditions()) {
                if (!Double.isNaN(previousRPriXNorm) && !Double.isNaN(previousRDualXLVNorm)
                        && !Double.isNaN(previousSurrDG)) {
                    if ((previousRPriXNorm <= rPriXNorm && rPriXNorm >= getToleranceFeas())
                            || (previousRDualXLVNorm <= rDualXLVNorm && rDualXLVNorm >= getToleranceFeas())) {
                        Log.w(MainActivity.JOPTIMIZER_LOGTAG,
                                "No progress achieved, exit iterations loop without desired accuracy");
                        response.setReturnCode(OptimizationResponse.WARN);
                        break;
                    }
                }
                previousRPriXNorm = rPriXNorm;
                previousRDualXLVNorm = rDualXLVNorm;
                previousSurrDG = surrDG;
            }

            // compute primal-dual search direction
            // a) prepare 11.55 system
            DoubleMatrix2D HessSum = getHessF0(X);
            for (int j = 0; j < getFi().length; j++) {
                if (HessFiX[j] != FunctionsUtils.ZEROES_MATRIX_PLACEHOLDER) {
                    HessSum.assign(HessFiX[j].copy().assign(Mult.mult(L.get(j))), Functions.plus);
                }
                //Log.d(MainActivity.JOPTIMIZER_LOGTAG,"HessSum : " + ArrayUtils.toString(HessSum.toArray()));
            }
            // DoubleMatrix2D GradSum = F2.make(getDim(), getDim());
            // for (int j = 0; j < getFi().length; j++) {
            //     DoubleMatrix1D g = GradFiX.viewRow(j);
            //     GradSum.assign(ALG.multOuter(g, g, null).assign(Mult.mult(-L.get(j) / fiX.get(j))), Functions.plus);
            //     //Log.d(MainActivity.JOPTIMIZER_LOGTAG,"GradSum : " + ArrayUtils.toString(GradSum.toArray()));
            // }
            DoubleMatrix2D GradSum = F2.make(getDim(), getDim());
            for (int j = 0; j < getFi().length; j++) {
                final double c = -L.getQuick(j) / fiX.getQuick(j);
                DoubleMatrix1D g = GradFiX.viewRow(j);
                SeqBlas.seqBlas.dger(c, g, g, GradSum);
                //Log.d(MainActivity.JOPTIMIZER_LOGTAG,"GradSum : " + ArrayUtils.toString(GradSum.toArray()));
            }
            DoubleMatrix2D Hpd = HessSum.assign(GradSum, Functions.plus);
            //DoubleMatrix2D Hpd = getHessF0(X).assign(HessSum, Functions.plus).assign(GradSum, Functions.plus);

            DoubleMatrix1D gradSum = F1.make(getDim());
            for (int j = 0; j < getFi().length; j++) {
                gradSum.assign(GradFiX.viewRow(j).copy().assign(Mult.div(-t * fiX.get(j))), Functions.plus);
                //Log.d(MainActivity.JOPTIMIZER_LOGTAG,"gradSum : " + ArrayUtils.toString(gradSum.toArray()));
            }
            DoubleMatrix1D g = null;
            if (getAT() == null) {
                g = gradF0X.copy().assign(gradSum, Functions.plus);
            } else {
                g = gradF0X.copy().assign(gradSum, Functions.plus).assign(ALG.mult(getAT(), V), Functions.plus);
            }

            // b) solving 11.55 system
            if (this.kktSolver == null) {
                this.kktSolver = new BasicKKTSolver();
            }
            //KKTSolver solver = new DiagonalKKTSolver();
            if (isCheckKKTSolutionAccuracy()) {
                kktSolver.setCheckKKTSolutionAccuracy(true);
                kktSolver.setToleranceKKT(getToleranceKKT());
            }
            kktSolver.setHMatrix(Hpd.toArray());
            kktSolver.setGVector(g.toArray());
            if (getA() != null) {
                kktSolver.setAMatrix(getA().toArray());
                kktSolver.setATMatrix(getAT().toArray());
                kktSolver.setHVector(rPriX.toArray());
            }
            double[][] sol = kktSolver.solve();
            DoubleMatrix1D stepX = F1.make(sol[0]);
            DoubleMatrix1D stepV = (sol[1] != null) ? F1.make(sol[1]) : F1.make(0);
            if (Log.isLoggable(MainActivity.JOPTIMIZER_LOGTAG, Log.DEBUG)) {
                Log.d(MainActivity.JOPTIMIZER_LOGTAG, "stepX: " + ArrayUtils.toString(stepX.toArray()));
                Log.d(MainActivity.JOPTIMIZER_LOGTAG, "stepV: " + ArrayUtils.toString(stepV.toArray()));
            }

            // c) solving for L
            DoubleMatrix1D stepL = null;
            DoubleMatrix2D diagFInv = F2.diagonal(fiX.copy().assign(Functions.inv));
            DoubleMatrix2D diagL = F2.diagonal(L);
            stepL = ALG.mult(diagFInv, ALG.mult(diagL, ALG.mult(GradFiX, stepX))).assign(Mult.mult(-1))
                    .assign(ALG.mult(diagFInv, rCentXLt), Functions.plus);
            if (Log.isLoggable(MainActivity.JOPTIMIZER_LOGTAG, Log.DEBUG)) {
                Log.d(MainActivity.JOPTIMIZER_LOGTAG, "stepL: " + ArrayUtils.toString(stepL.toArray()));
            }

            // line search and update
            // a) sMax computation
            double sMax = Double.MAX_VALUE;
            for (int j = 0; j < getFi().length; j++) {
                if (stepL.get(j) < 0) {
                    sMax = Math.min(-L.get(j) / stepL.get(j), sMax);
                }
            }
            sMax = Math.min(1, sMax);
            double s = 0.99 * sMax;
            // b) backtracking with f
            DoubleMatrix1D X1 = F1.make(X.size());
            DoubleMatrix1D L1 = F1.make(L.size());
            DoubleMatrix1D V1 = F1.make(V.size());
            DoubleMatrix1D fiX1 = null;
            DoubleMatrix1D gradF0X1 = null;
            DoubleMatrix2D GradFiX1 = null;
            DoubleMatrix1D rPriX1 = null;
            DoubleMatrix1D rCentX1L1t = null;
            DoubleMatrix1D rDualX1L1V1 = null;
            int cnt = 0;
            boolean areAllNegative = true;
            while (cnt < 500) {
                cnt++;
                // X1 = X + s*stepX
                X1 = stepX.copy().assign(Mult.mult(s)).assign(X, Functions.plus);
                DoubleMatrix1D ineqValueX1 = getFi(X1);
                areAllNegative = true;
                for (int j = 0; areAllNegative && j < getFi().length; j++) {
                    areAllNegative = (Double.compare(ineqValueX1.get(j), 0.) < 0);
                }
                if (areAllNegative) {
                    break;
                }
                s = getBeta() * s;
            }
            if (!areAllNegative) {
                //exited from the feasible region
                throw new Exception("Optimization failed: impossible to remain within the faesible region");
            }
            Log.d(MainActivity.JOPTIMIZER_LOGTAG, "s: " + s);
            // c) backtracking with norm
            double previousNormRX1L1V1t = Double.NaN;
            cnt = 0;
            while (cnt < 500) {
                cnt++;
                X1 = stepX.copy().assign(Mult.mult(s)).assign(X, Functions.plus);
                L1 = stepL.copy().assign(Mult.mult(s)).assign(L, Functions.plus);
                V1 = stepV.copy().assign(Mult.mult(s)).assign(V, Functions.plus);
                // X1.assign(stepX.copy().assign(Mult.mult(s)).assign(X, Functions.plus));
                // L1.assign(stepL.copy().assign(Mult.mult(s)).assign(L, Functions.plus));
                // V1.assign(stepV.copy().assign(Mult.mult(s)).assign(V, Functions.plus));
                if (isInDomainF0(X1)) {
                    fiX1 = getFi(X1);
                    gradF0X1 = getGradF0(X1);
                    GradFiX1 = getGradFi(X1);
                    rPriX1 = rPri(X1);
                    rCentX1L1t = rCent(fiX1, L1, t);
                    rDualX1L1V1 = rDual(GradFiX1, gradF0X1, L1, V1);
                    //Log.d(MainActivity.JOPTIMIZER_LOGTAG,"rPriX1 : "+ArrayUtils.toString(rPriX1.toArray()));
                    //Log.d(MainActivity.JOPTIMIZER_LOGTAG,"rCentX1L1t : "+ArrayUtils.toString(rCentX1L1t.toArray()));
                    //Log.d(MainActivity.JOPTIMIZER_LOGTAG,"rDualX1L1V1: "+ArrayUtils.toString(rDualX1L1V1.toArray()));
                    double normRX1L1V1t = Math
                            .sqrt(ALG.norm2(rPriX1) + ALG.norm2(rCentX1L1t) + ALG.norm2(rDualX1L1V1));
                    //Log.d(MainActivity.JOPTIMIZER_LOGTAG,"normRX1L1V1t: "+normRX1L1V1t);
                    if (normRX1L1V1t <= (1 - getAlpha() * s) * normRXLVt) {
                        break;
                    }
                    if (!Double.isNaN(previousNormRX1L1V1t)) {
                        if (previousNormRX1L1V1t <= normRX1L1V1t) {
                            Log.w(MainActivity.JOPTIMIZER_LOGTAG,
                                    "No progress achieved in backtracking with norm");
                            break;
                        }
                    }
                    previousNormRX1L1V1t = normRX1L1V1t;
                }
                s = getBeta() * s;
                //Log.d(MainActivity.JOPTIMIZER_LOGTAG,"s: " + s);
            }

            // update
            X = X1;
            V = V1;
            L = L1;
            // fiX = fiX1;
            // gradF0X = gradF0X1;
            // GradFiX = GradFiX1;
            //
            // rPriX = rPriX1;
            // rCentXLt = rCentX1L1t;
            // rDualXLV = rDualX1L1V1;
            // rPriXNorm = Math.sqrt(ALG.norm2(rPriX));
            // rCentXLtNorm = Math.sqrt(ALG.norm2(rCentXLt));
            // rDualXLVNorm = Math.sqrt(ALG.norm2(rDualXLV));
            // normRXLVt = Math.sqrt(Math.pow(rPriXNorm, 2) + Math.pow(rCentXLtNorm, 2) + Math.pow(rDualXLVNorm, 2));
            // if(Log.isLoggable(MainActivity.JOPTIMIZER_LOGTAG, Log.DEBUG)){
            //     Log.d(MainActivity.JOPTIMIZER_LOGTAG,"rPri norm: " + rPriXNorm);
            //     Log.d(MainActivity.JOPTIMIZER_LOGTAG,"rCent norm: " + rCentXLtNorm);
            //     Log.d(MainActivity.JOPTIMIZER_LOGTAG,"rDual norm: " + rDualXLVNorm);
            //     Log.d(MainActivity.JOPTIMIZER_LOGTAG,"surrDG : " + surrDG);
            // }
        }

        long tStop = System.currentTimeMillis();
        Log.d(MainActivity.JOPTIMIZER_LOGTAG, "time: " + (tStop - tStart));
        Log.d(MainActivity.JOPTIMIZER_LOGTAG, "sol : " + ArrayUtils.toString(X.toArray()));
        Log.d(MainActivity.JOPTIMIZER_LOGTAG, "ret code: " + response.getReturnCode());
        response.setSolution(X.toArray());
        setOptimizationResponse(response);
        return response.getReturnCode();
    }
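Inside the first backtracking loop above, Double.compare(value, 0.) < 0 enforces strict feasibility of every inequality constraint. A standalone sketch of that test, not part of JOptimizer:

    class FeasibilitySketch {
        // Every inequality constraint value must be strictly negative for the
        // candidate point to be accepted. Double.compare(v, 0.) >= 0 rejects +0.0
        // and NaN; note that -0.0 still counts as below 0.0 in the total order.
        static boolean isStrictlyFeasible(double[] inequalityValues) {
            for (double v : inequalityValues) {
                if (Double.compare(v, 0.) >= 0) {
                    return false;
                }
            }
            return true;
        }
    }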