List of usage examples for java.lang.Double.compare
public static int compare(double d1, double d2)
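Double.compare(d1, d2) returns a negative value if d1 is numerically less than d2, zero if the two values are equal, and a positive value if d1 is greater. Unlike the == and < operators, it imposes a total order: -0.0 is ordered below +0.0, and NaN is considered equal to itself and greater than every other value, including Double.POSITIVE_INFINITY. The short snippet below (not taken from any of the source files that follow) illustrates this contract:

    int a = Double.compare(1.0, 2.0);  // negative: 1.0 is less than 2.0
    int b = Double.compare(2.0, 2.0);  // zero: the values are equal
    int c = Double.compare(3.0, 2.0);  // positive: 3.0 is greater than 2.0

    // Corner cases where compare differs from the primitive operators:
    Double.compare(-0.0, 0.0);                             // negative: -0.0 sorts below +0.0
    Double.compare(Double.NaN, Double.NaN);                // zero: NaN is equal to itself here
    Double.compare(Double.NaN, Double.POSITIVE_INFINITY);  // positive: NaN sorts above everything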
From source file: org.esa.nest.gpf.ERSCalibrator.java
/**
 * Compute replica pulse variations correction factor.
 */
private void computeReplicaPulseVariationsCorrectionFactor() {

    final double replicaPulsePower;
    if (isCEOSFormat) {
        replicaPulsePower = getReplicaPulsePowerForCEOS();
    } else { // ENVISAT
        replicaPulsePower = getReplicaPulsePowerForENVISAT();
    }

    if (Double.compare(replicaPulsePower, -9999999.9999999) == 0
            || Double.compare(replicaPulsePower, 0.0) == 0) {
        replicaPulseVariationsCorrectionFactor = 1.0;
    } else {
        if (isERS1Mission) {
            replicaPulseVariationsCorrectionFactor = replicaPulsePower / 205229.0;
        } else {
            replicaPulseVariationsCorrectionFactor = replicaPulsePower / 156000.0;
        }
    }

    /*
    if (isERS1Mission) {
        if (pafID.contains(D_PAF) || pafID.contains(I_PAF) || pafID.contains(UK_PAF)) {
            if (Double.compare(replicaPulsePower, -9999999.9999999) == 0) {
                if (pafID.contains(D_PAF)) {
                    replicaPulseVariationsCorrectionFactor = getChirpAverageDensityImage() / 267.20;
                } else {
                    throw new OperatorException("Replica pulse power is not available");
                }
            } else {
                replicaPulseVariationsCorrectionFactor = replicaPulsePower / 205229.0;
            }
        } else if (pafID.contains(ESRIN)) {
            replicaPulseVariationsCorrectionFactor = getChirpAverageDensityImage() / 267.20;
        }
    } else { // ERS-2
        if (pafID.contains(D_PAF) || pafID.contains(I_PAF) || pafID.contains(UK_PAF)
                || pafID.contains(ESRIN)) {
            if (Double.compare(replicaPulsePower, -9999999.9999999) == 0) {
                throw new OperatorException("Replica pulse power is not available");
            }
            replicaPulseVariationsCorrectionFactor = replicaPulsePower / 156000.0;
        }
    }
    */
}
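The example above uses Double.compare(x, SENTINEL) == 0 as an equality test against a no-data sentinel. For ordinary finite sentinels this behaves like ==, but the same pattern also works when the sentinel is NaN, where == always yields false. A standalone sketch (the NaN sentinel is illustrative and not part of the ERSCalibrator code):

    double noDataValue = Double.NaN;  // hypothetical no-data marker
    double sample = Double.NaN;
    boolean viaOperator = (sample == noDataValue);                  // false: NaN is never == to anything
    boolean viaCompare  = Double.compare(sample, noDataValue) == 0; // true: compare treats NaN as equal to NaN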
From source file: org.esa.nest.gpf.RangeDopplerGeocodingOp.java
/** * Called by the framework in order to compute the stack of tiles for the given target bands. * <p>The default implementation throws a runtime exception with the message "not implemented".</p> * * @param targetTiles The current tiles to be computed for each target band. * @param targetRectangle The area in pixel coordinates to be computed (same for all rasters in <code>targetRasters</code>). * @param pm A progress monitor which should be used to determine computation cancelation requests. * @throws OperatorException if an error occurs during computation of the target rasters. *//*from w ww .ja v a 2 s .c o m*/ @Override public void computeTileStack(Map<Band, Tile> targetTiles, Rectangle targetRectangle, ProgressMonitor pm) throws OperatorException { processingStarted = true; try { if (!isElevationModelAvailable) { getElevationModel(); } } catch (Exception e) { throw new OperatorException(e); } final int x0 = targetRectangle.x; final int y0 = targetRectangle.y; final int w = targetRectangle.width; final int h = targetRectangle.height; //System.out.println("x0 = " + x0 + ", y0 = " + y0 + ", w = " + w + ", h = " + h); final TileGeoreferencing tileGeoRef = new TileGeoreferencing(targetProduct, x0 - 1, y0 - 1, w + 2, h + 2); try { double[][] localDEM = new double[h + 2][w + 2]; if (useAvgSceneHeight) { DEMFactory.fillDEM(localDEM, (float) avgSceneHeight); } else { final boolean valid = DEMFactory.getLocalDEM(dem, demNoDataValue, demResamplingMethod, tileGeoRef, x0, y0, w, h, sourceProduct, nodataValueAtSea, localDEM); if (!valid && nodataValueAtSea) return; } final GeoPos geoPos = new GeoPos(); final double[] earthPoint = new double[3]; final double[] sensorPos = new double[3]; final int srcMaxRange = sourceImageWidth - 1; final int srcMaxAzimuth = sourceImageHeight - 1; ProductData demBuffer = null; ProductData incidenceAngleBuffer = null; ProductData projectedIncidenceAngleBuffer = null; ProductData incidenceAngleFromEllipsoidBuffer = null; final List<TileData> trgTileList = new ArrayList<TileData>(); final Set<Band> keySet = targetTiles.keySet(); for (Band targetBand : keySet) { if (targetBand.getName().equals("elevation")) { demBuffer = targetTiles.get(targetBand).getDataBuffer(); continue; } if (targetBand.getName().equals("incidenceAngle")) { incidenceAngleBuffer = targetTiles.get(targetBand).getDataBuffer(); continue; } if (targetBand.getName().equals("projectedIncidenceAngle")) { projectedIncidenceAngleBuffer = targetTiles.get(targetBand).getDataBuffer(); continue; } if (targetBand.getName().equals("incidenceAngleFromEllipsoid")) { incidenceAngleFromEllipsoidBuffer = targetTiles.get(targetBand).getDataBuffer(); continue; } final Band[] srcBands = targetBandNameToSourceBand.get(targetBand.getName()); final TileData td = new TileData(targetTiles.get(targetBand), srcBands, isPolsar, targetBand.getName(), getBandUnit(targetBand.getName()), absRoot, calibrator, imgResampling); td.applyRadiometricNormalization = targetBandApplyRadiometricNormalizationFlag .get(targetBand.getName()); td.applyRetroCalibration = targetBandApplyRetroCalibrationFlag.get(targetBand.getName()); trgTileList.add(td); } final int maxY = y0 + h; final int maxX = x0 + w; final TileData[] trgTiles = trgTileList.toArray(new TileData[trgTileList.size()]); for (int y = y0; y < maxY; y++) { final int yy = y - y0 + 1; for (int x = x0; x < maxX; x++) { final int index = trgTiles[0].targetTile.getDataBufferIndex(x, y); double alt = localDEM[yy][x - x0 + 1]; if (saveDEM) { demBuffer.setElemDoubleAt(index, alt); } if (alt == 
demNoDataValue && !useAvgSceneHeight) { if (nodataValueAtSea) { //saveNoDataValueToTarget(index, trgTiles); continue; } } tileGeoRef.getGeoPos(x, y, geoPos); final double lat = geoPos.lat; double lon = geoPos.lon; if (lon >= 180.0) { lon -= 360.0; } if (alt == demNoDataValue && !nodataValueAtSea) { // get corrected elevation for 0 alt = EarthGravitationalModel96.instance().getEGM(lat, lon); } GeoUtils.geo2xyzWGS84(lat, lon, alt, earthPoint); final double zeroDopplerTime = SARGeocoding.getEarthPointZeroDopplerTime(firstLineUTC, lineTimeInterval, wavelength, earthPoint, sensorPosition, sensorVelocity); if (Double.compare(zeroDopplerTime, SARGeocoding.NonValidZeroDopplerTime) == 0) { //saveNoDataValueToTarget(index, trgTiles); continue; } double slantRange = SARGeocoding.computeSlantRange(zeroDopplerTime, timeArray, xPosArray, yPosArray, zPosArray, earthPoint, sensorPos); double azimuthIndex = 0.0; double rangeIndex = 0.0; double zeroDoppler = zeroDopplerTime; if (!skipBistaticCorrection) { // skip bistatic correction for COSMO, TerraSAR-X and RadarSAT-2 zeroDoppler = zeroDopplerTime + slantRange / Constants.lightSpeedInMetersPerDay; } slantRange = SARGeocoding.computeSlantRange(zeroDoppler, timeArray, xPosArray, yPosArray, zPosArray, earthPoint, sensorPos); rangeIndex = SARGeocoding.computeRangeIndex(srgrFlag, sourceImageWidth, firstLineUTC, lastLineUTC, rangeSpacing, zeroDoppler, slantRange, nearEdgeSlantRange, srgrConvParams); if (rangeIndex == -1.0) { //saveNoDataValueToTarget(index, trgTiles); continue; } // the following check will be removed if no product of any mission is read with near range on right if (!nearRangeOnLeft) { rangeIndex = srcMaxRange - rangeIndex; } azimuthIndex = (zeroDoppler - firstLineUTC) / lineTimeInterval; if (!SARGeocoding.isValidCell(rangeIndex, azimuthIndex, lat, lon, latitude, longitude, srcMaxRange, srcMaxAzimuth, sensorPos)) { //saveNoDataValueToTarget(index, trgTiles); } else { final double[] localIncidenceAngles = { SARGeocoding.NonValidIncidenceAngle, SARGeocoding.NonValidIncidenceAngle }; if (saveLocalIncidenceAngle || saveProjectedLocalIncidenceAngle || saveSigmaNought) { final LocalGeometry localGeometry = new LocalGeometry(x, y, tileGeoRef, earthPoint, sensorPos); SARGeocoding.computeLocalIncidenceAngle(localGeometry, demNoDataValue, saveLocalIncidenceAngle, saveProjectedLocalIncidenceAngle, saveSigmaNought, x0, y0, x, y, localDEM, localIncidenceAngles); // in degrees if (saveLocalIncidenceAngle && localIncidenceAngles[0] != SARGeocoding.NonValidIncidenceAngle) { incidenceAngleBuffer.setElemDoubleAt(index, localIncidenceAngles[0]); } if (saveProjectedLocalIncidenceAngle && localIncidenceAngles[1] != SARGeocoding.NonValidIncidenceAngle) { projectedIncidenceAngleBuffer.setElemDoubleAt(index, localIncidenceAngles[1]); } } if (saveIncidenceAngleFromEllipsoid && incidenceAngle != null) { incidenceAngleFromEllipsoidBuffer.setElemDoubleAt(index, incidenceAngle.getPixelFloat((float) rangeIndex, (float) azimuthIndex)); } double satelliteHeight = 0; double sceneToEarthCentre = 0; if (saveSigmaNought) { satelliteHeight = Math.sqrt(sensorPos[0] * sensorPos[0] + sensorPos[1] * sensorPos[1] + sensorPos[2] * sensorPos[2]); sceneToEarthCentre = Math.sqrt(earthPoint[0] * earthPoint[0] + earthPoint[1] * earthPoint[1] + earthPoint[2] * earthPoint[2]); } for (TileData tileData : trgTiles) { int[] subSwathIndex = { INVALID_SUB_SWATH_INDEX }; double v = getPixelValue(azimuthIndex, rangeIndex, tileData, subSwathIndex); if (v != tileData.noDataValue && 
tileData.applyRadiometricNormalization) { if (localIncidenceAngles[1] != SARGeocoding.NonValidIncidenceAngle) { v = calibrator.applyCalibration(v, rangeIndex, azimuthIndex, slantRange, satelliteHeight, sceneToEarthCentre, localIncidenceAngles[1], tileData.bandPolar, tileData.bandUnit, subSwathIndex); // use projected incidence angle } else { v = tileData.noDataValue; } } tileData.tileDataBuffer.setElemDoubleAt(index, v); } orthoDataProduced = true; } } } localDEM = null; } catch (Throwable e) { orthoDataProduced = true; //to prevent multiple error messages OperatorUtils.catchOperatorException(getId(), e); } }
From source file: i5.las2peer.services.videoAdapter.AdapterClass.java
private JSONArray weightSort(JSONArray finalResult) {
    int i = 0;
    List<JSONObject> jsonValues = new ArrayList<JSONObject>();
    JSONArray sortedJsonArray = new JSONArray();
    while (!finalResult.isNull(i)) {
        JSONObject object = finalResult.getJSONObject(i);
        jsonValues.add(object);
        i++;
    }

    Collections.sort(jsonValues, new Comparator<JSONObject>() {
        // You can change "Name" with "ID" if you want to sort by ID
        private static final String KEY_NAME = "weight";

        public int compare(JSONObject a, JSONObject b) {
            Double valA = 0.0;
            Double valB = 0.0;
            try {
                System.out.println(a.get(KEY_NAME));
                valA = a.getDouble(KEY_NAME);
                valB = b.getDouble(KEY_NAME);
            } catch (JSONException e) {
                System.out.println(e);
                // do something
            }
            //System.out.println("valA " + valA);
            return -Double.compare(valA, valB);
            //return valA.compareTo(valB);
            //if you want to change the sort order, simply use the following:
            //return -valA.compareTo(valB);
        }
    });

    for (int j = 0; j < finalResult.length(); j++) {
        sortedJsonArray.put(jsonValues.get(j));
    }
    return sortedJsonArray;
}
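The negated Double.compare call above yields a descending sort by weight. On Java 8 and later the same ordering can be expressed with Comparator.comparingDouble; the sketch below is a hedged alternative that assumes the org.json JSONObject API used in the example (the optDouble default of 0.0 stands in for the original catch block):

    // Descending sort by "weight", equivalent to returning -Double.compare(valA, valB)
    Collections.sort(jsonValues,
            Comparator.<JSONObject>comparingDouble(o -> o.optDouble("weight", 0.0)).reversed());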
From source file: cz.cuni.mff.spl.evaluator.statistics.KolmogorovSmirnovTestFlag.java
/***
 * Creates {@code H} of size {@code m x m} as described in [1] (see above)
 * using double-precision.
 *
 * @param d statistic
 * @param n sample size
 * @return H matrix
 * @throws NumberIsTooLargeException if fractional part is greater than 1
 */
private RealMatrix createRoundedH(double d, int n) throws NumberIsTooLargeException {

    final int k = (int) Math.ceil(n * d);
    final int m = 2 * k - 1;
    final double h = k - n * d;
    if (h >= 1) {
        throw new NumberIsTooLargeException(h, 1.0, false);
    }
    final double[][] Hdata = new double[m][m];

    /*
     * Start by filling everything with either 0 or 1.
     */
    for (int i = 0; i < m; ++i) {
        for (int j = 0; j < m; ++j) {
            if (i - j + 1 < 0) {
                Hdata[i][j] = 0;
            } else {
                Hdata[i][j] = 1;
            }
        }
    }

    /*
     * Setting up power-array to avoid calculating the same value twice:
     * hPowers[0] = h^1 ... hPowers[m-1] = h^m
     */
    final double[] hPowers = new double[m];
    hPowers[0] = h;
    for (int i = 1; i < m; ++i) {
        hPowers[i] = h * hPowers[i - 1];
    }

    /*
     * First column and last row has special values (each other reversed).
     */
    for (int i = 0; i < m; ++i) {
        Hdata[i][0] = Hdata[i][0] - hPowers[i];
        Hdata[m - 1][i] -= hPowers[m - i - 1];
    }

    /*
     * [1] states: "For 1/2 < h < 1 the bottom left element of the matrix should be
     * (1 - 2*h^m + (2h - 1)^m )/m!" Since 0 <= h < 1, then if h > 1/2 is sufficient to check:
     */
    if (Double.compare(h, 0.5) > 0) {
        Hdata[m - 1][0] += FastMath.pow(2 * h - 1, m);
    }

    /*
     * Aside from the first column and last row, the (i, j)-th element is 1/(i - j + 1)! if
     * i - j + 1 >= 0, else 0. 1's and 0's are already put, so only division with (i - j + 1)!
     * is needed in the elements that have 1's. There is no need to calculate (i - j + 1)! and
     * then divide - small steps avoid overflows. Note that i - j + 1 > 0 <=> i + 1 > j instead
     * of j'ing all the way to m. Also note that it is started at g = 2 because dividing by 1
     * isn't really necessary.
     */
    for (int i = 0; i < m; ++i) {
        for (int j = 0; j < i + 1; ++j) {
            if (i - j + 1 > 0) {
                for (int g = 2; g <= i - j + 1; ++g) {
                    Hdata[i][j] /= g;
                }
            }
        }
    }
    return MatrixUtils.createRealMatrix(Hdata);
}
From source file: ml.shifu.shifu.core.dtrain.dt.DTMaster.java
@Override public void init(MasterContext<DTMasterParams, DTWorkerParams> context) { Properties props = context.getProps(); // init model config and column config list at first SourceType sourceType;/*from w ww . j a va2 s . com*/ try { sourceType = SourceType .valueOf(props.getProperty(CommonConstants.MODELSET_SOURCE_TYPE, SourceType.HDFS.toString())); this.modelConfig = CommonUtils.loadModelConfig(props.getProperty(CommonConstants.SHIFU_MODEL_CONFIG), sourceType); this.columnConfigList = CommonUtils .loadColumnConfigList(props.getProperty(CommonConstants.SHIFU_COLUMN_CONFIG), sourceType); } catch (IOException e) { throw new RuntimeException(e); } // worker number is used to estimate nodes per iteration for stats this.workerNumber = NumberFormatUtils.getInt(props.getProperty(GuaguaConstants.GUAGUA_WORKER_NUMBER), true); // check if variables are set final selected int[] inputOutputIndex = DTrainUtils.getNumericAndCategoricalInputAndOutputCounts(this.columnConfigList); this.inputNum = inputOutputIndex[0] + inputOutputIndex[1]; this.isAfterVarSelect = (inputOutputIndex[3] == 1); // cache all feature list for sampling features this.allFeatures = this.getAllFeatureList(columnConfigList, isAfterVarSelect); int trainerId = Integer.valueOf(context.getProps().getProperty(CommonConstants.SHIFU_TRAINER_ID, "0")); // If grid search, select valid paramters, if not parameters is what in ModelConfig.json GridSearch gs = new GridSearch(modelConfig.getTrain().getParams(), modelConfig.getTrain().getGridConfigFileContent()); Map<String, Object> validParams = this.modelConfig.getTrain().getParams(); if (gs.hasHyperParam()) { validParams = gs.getParams(trainerId); LOG.info("Start grid search master with params: {}", validParams); } Object vtObj = validParams.get("ValidationTolerance"); if (vtObj != null) { try { validationTolerance = Double.parseDouble(vtObj.toString()); LOG.warn("Validation by tolerance is enabled with value {}.", validationTolerance); } catch (NumberFormatException ee) { validationTolerance = 0d; LOG.warn( "Validation by tolerance isn't enabled because of non numerical value of ValidationTolerance: {}.", vtObj); } } else { LOG.warn("Validation by tolerance isn't enabled."); } // tree related parameters initialization Object fssObj = validParams.get("FeatureSubsetStrategy"); if (fssObj != null) { try { this.featureSubsetRate = Double.parseDouble(fssObj.toString()); // no need validate featureSubsetRate is in (0,1], as already validated in ModelInspector this.featureSubsetStrategy = null; } catch (NumberFormatException ee) { this.featureSubsetStrategy = FeatureSubsetStrategy.of(fssObj.toString()); } } else { LOG.warn("FeatureSubsetStrategy is not set, set to TWOTHRIDS by default in DTMaster."); this.featureSubsetStrategy = FeatureSubsetStrategy.TWOTHIRDS; this.featureSubsetRate = 0; } // max depth Object maxDepthObj = validParams.get("MaxDepth"); if (maxDepthObj != null) { this.maxDepth = Integer.valueOf(maxDepthObj.toString()); } else { this.maxDepth = 10; } // max leaves which is used for leaf-wised tree building, TODO add more benchmarks Object maxLeavesObj = validParams.get("MaxLeaves"); if (maxLeavesObj != null) { this.maxLeaves = Integer.valueOf(maxLeavesObj.toString()); } else { this.maxLeaves = -1; } // enable leaf wise tree building once maxLeaves is configured if (this.maxLeaves > 0) { this.isLeafWise = true; } // maxBatchSplitSize means each time split # of batch nodes Object maxBatchSplitSizeObj = validParams.get("MaxBatchSplitSize"); if (maxBatchSplitSizeObj != null) { 
this.maxBatchSplitSize = Integer.valueOf(maxBatchSplitSizeObj.toString()); } else { // by default split 32 at most in a batch this.maxBatchSplitSize = 32; } assert this.maxDepth > 0 && this.maxDepth <= 20; // hide in parameters, this to avoid OOM issue for each iteration Object maxStatsMemoryMB = validParams.get("MaxStatsMemoryMB"); if (maxStatsMemoryMB != null) { this.maxStatsMemory = Long.valueOf(validParams.get("MaxStatsMemoryMB").toString()) * 1024 * 1024; if (this.maxStatsMemory > ((2L * Runtime.getRuntime().maxMemory()) / 3)) { // if >= 2/3 max memory, take 2/3 max memory to avoid OOM this.maxStatsMemory = ((2L * Runtime.getRuntime().maxMemory()) / 3); } } else { // by default it is 1/2 of heap, about 1.5G setting in current Shifu this.maxStatsMemory = Runtime.getRuntime().maxMemory() / 2L; } // assert this.maxStatsMemory <= Math.min(Runtime.getRuntime().maxMemory() * 0.6, 800 * 1024 * 1024L); this.treeNum = Integer.valueOf(validParams.get("TreeNum").toString()); this.isRF = ALGORITHM.RF.toString().equalsIgnoreCase(modelConfig.getAlgorithm()); this.isGBDT = ALGORITHM.GBT.toString().equalsIgnoreCase(modelConfig.getAlgorithm()); if (this.isGBDT) { // learning rate only effective in gbdt this.learningRate = Double.valueOf(validParams.get(CommonConstants.LEARNING_RATE).toString()); } // initialize impurity type according to regression or classfication String imStr = validParams.get("Impurity").toString(); int numClasses = 2; if (this.modelConfig.isClassification()) { numClasses = this.modelConfig.getTags().size(); } // these two parameters is to stop tree growth parameters int minInstancesPerNode = Integer.valueOf(validParams.get("MinInstancesPerNode").toString()); double minInfoGain = Double.valueOf(validParams.get("MinInfoGain").toString()); if (imStr.equalsIgnoreCase("entropy")) { impurity = new Entropy(numClasses, minInstancesPerNode, minInfoGain); } else if (imStr.equalsIgnoreCase("gini")) { impurity = new Gini(numClasses, minInstancesPerNode, minInfoGain); } else { impurity = new Variance(minInstancesPerNode, minInfoGain); } // checkpoint folder and interval (every # iterations to do checkpoint) this.checkpointInterval = NumberFormatUtils .getInt(context.getProps().getProperty(CommonConstants.SHIFU_DT_MASTER_CHECKPOINT_INTERVAL, "20")); this.checkpointOutput = new Path(context.getProps() .getProperty(CommonConstants.SHIFU_DT_MASTER_CHECKPOINT_FOLDER, "tmp/cp_" + context.getAppId())); // cache conf to avoid new this.conf = new Configuration(); // if continuous model training is enabled this.isContinuousEnabled = Boolean.TRUE.toString() .equalsIgnoreCase(context.getProps().getProperty(CommonConstants.CONTINUOUS_TRAINING)); this.dtEarlyStopDecider = new DTEarlyStopDecider(this.maxDepth); if (validParams.containsKey("EnableEarlyStop") && Boolean.valueOf(validParams.get("EnableEarlyStop").toString().toLowerCase())) { this.enableEarlyStop = true; } LOG.info( "Master init params: isAfterVarSel={}, featureSubsetStrategy={}, featureSubsetRate={} maxDepth={}, maxStatsMemory={}, " + "treeNum={}, impurity={}, workerNumber={}, minInstancesPerNode={}, minInfoGain={}, isRF={}, " + "isGBDT={}, isContinuousEnabled={}, enableEarlyStop={}.", isAfterVarSelect, featureSubsetStrategy, this.featureSubsetRate, maxDepth, maxStatsMemory, treeNum, imStr, this.workerNumber, minInstancesPerNode, minInfoGain, this.isRF, this.isGBDT, this.isContinuousEnabled, this.enableEarlyStop); this.toDoQueue = new LinkedList<TreeNode>(); if (this.isLeafWise) { this.toSplitQueue = new PriorityQueue<TreeNode>(64, new 
Comparator<TreeNode>() { @Override public int compare(TreeNode o1, TreeNode o2) { return Double.compare(o2.getNode().getWgtCntRatio() * o2.getNode().getGain(), o1.getNode().getWgtCntRatio() * o1.getNode().getGain()); } }); } // initialize trees if (context.isFirstIteration()) { if (this.isRF) { // for random forest, trees are trained in parallel this.trees = new CopyOnWriteArrayList<TreeNode>(); for (int i = 0; i < treeNum; i++) { this.trees.add(new TreeNode(i, new Node(Node.ROOT_INDEX), 1d)); } } if (this.isGBDT) { if (isContinuousEnabled) { TreeModel existingModel; try { Path modelPath = new Path(context.getProps().getProperty(CommonConstants.GUAGUA_OUTPUT)); existingModel = (TreeModel) ModelSpecLoaderUtils.loadModel(modelConfig, modelPath, ShifuFileUtils .getFileSystemBySourceType(this.modelConfig.getDataSet().getSource())); if (existingModel == null) { // null means no existing model file or model file is in wrong format this.trees = new CopyOnWriteArrayList<TreeNode>(); this.trees.add(new TreeNode(0, new Node(Node.ROOT_INDEX), 1d));// learning rate is 1 for 1st LOG.info("Starting to train model from scratch and existing model is empty."); } else { this.trees = existingModel.getTrees(); this.existingTreeSize = this.trees.size(); // starting from existing models, first tree learning rate is current learning rate this.trees.add(new TreeNode(this.existingTreeSize, new Node(Node.ROOT_INDEX), this.existingTreeSize == 0 ? 1d : this.learningRate)); LOG.info("Starting to train model from existing model {} with existing trees {}.", modelPath, existingTreeSize); } } catch (IOException e) { throw new GuaguaRuntimeException(e); } } else { this.trees = new CopyOnWriteArrayList<TreeNode>(); // for GBDT, initialize the first tree. trees are trained sequentially,first tree learning rate is 1 this.trees.add(new TreeNode(0, new Node(Node.ROOT_INDEX), 1.0d)); } } } else { // recover all states once master is fail over LOG.info("Recover master status from checkpoint file {}", this.checkpointOutput); recoverMasterStatus(sourceType); } }
From source file: org.mifos.accounts.loan.struts.actionforms.LoanAccountActionForm.java
public boolean isAmountZeroOrNull(String loanAmount) {
    return StringUtils.isBlank(loanAmount)
            || (Double.compare(new LocalizationConverter().getDoubleValueForCurrentLocale(loanAmount),
                    NumberUtils.DOUBLE_ZERO) == 0);
}
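One detail worth noting for zero checks like the one above: Double.compare distinguishes -0.0 from +0.0, while the == operator treats them as equal, so the two tests can disagree on a negative zero. A standalone illustration (not taken from the Mifos source):

    double amount = -0.0;
    boolean viaOperator = (amount == 0.0);          // true: == ignores the sign of zero
    int viaCompare = Double.compare(amount, 0.0);   // negative: compare orders -0.0 below +0.0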
From source file: org.sakaiproject.tool.assessment.services.GradingService.java
/** * This is the big, complicated mess where we take all the items in * an assessment, store the grading data, auto-grade it, and update * everything.//w w w. ja v a 2s .c om * * If regrade is true, we just recalculate the graded score. If it's * false, we do everything from scratch. */ public void storeGrades(AssessmentGradingData data, boolean regrade, PublishedAssessmentIfc pub, HashMap publishedItemHash, HashMap publishedItemTextHash, HashMap publishedAnswerHash, boolean persistToDB, HashMap invalidFINMap, ArrayList invalidSALengthList) throws GradebookServiceException, FinFormatException { log.debug("****x1. regrade =" + regrade + " " + (new Date()).getTime()); try { boolean imageMapAllOk = true; boolean NeededAllOk = false; String agent = data.getAgentId(); // note that this itemGradingSet is a partial set of answer submitted. it contains only // newly submitted answers, updated answers and MCMR/FIB/FIN answers ('cos we need the old ones to // calculate scores for new ones) Set<ItemGradingData> itemGradingSet = data.getItemGradingSet(); if (itemGradingSet == null) itemGradingSet = new HashSet<ItemGradingData>(); log.debug("****itemGrading size=" + itemGradingSet.size()); List<ItemGradingData> tempItemGradinglist = new ArrayList<ItemGradingData>(itemGradingSet); // CALCULATED_QUESTION - if this is a calc question. Carefully sort the list of answers if (isCalcQuestion(tempItemGradinglist, publishedItemHash)) { Collections.sort(tempItemGradinglist, new Comparator<ItemGradingData>() { public int compare(ItemGradingData o1, ItemGradingData o2) { ItemGradingData gradeData1 = o1; ItemGradingData gradeData2 = o2; // protect against blank ones in samigo initial setup. if (gradeData1 == null) return -1; if (gradeData2 == null) return 1; if (gradeData1.getPublishedAnswerId() == null) return -1; if (gradeData2.getPublishedAnswerId() == null) return 1; return gradeData1.getPublishedAnswerId().compareTo(gradeData2.getPublishedAnswerId()); } }); } Iterator<ItemGradingData> iter = tempItemGradinglist.iterator(); // fibEmiAnswersMap contains a map of HashSet of answers for a FIB or EMI item, // key =itemid, value= HashSet of answers for each item. // For FIB: This is used to keep track of answers we have already used for // mutually exclusive multiple answer type of FIB, such as // The flag of the US is {red|white|blue},{red|white|blue}, and {red|white|blue}. // so if the first blank has an answer 'red', the 'red' answer should // not be included in the answers for the other mutually exclusive blanks. // For EMI: This keeps track of how many answers were given so we don't give // extra marks for to many answers. Map fibEmiAnswersMap = new HashMap(); Map<Long, Map<Long, Set<EMIScore>>> emiScoresMap = new HashMap<Long, Map<Long, Set<EMIScore>>>(); //change algorithm based on each question (SAK-1930 & IM271559) -cwen HashMap totalItems = new HashMap(); log.debug("****x2. " + (new Date()).getTime()); double autoScore = (double) 0; Long itemId = (long) 0; int calcQuestionAnswerSequence = 1; // sequence of answers for CALCULATED_QUESTION while (iter.hasNext()) { ItemGradingData itemGrading = iter.next(); // CALCULATED_QUESTION - We increment this so we that calculated // questions can know where we are in the sequence of answers. 
if (itemGrading.getPublishedItemId().equals(itemId)) { calcQuestionAnswerSequence++; } else { calcQuestionAnswerSequence = 1; } itemId = itemGrading.getPublishedItemId(); ItemDataIfc item = (ItemDataIfc) publishedItemHash.get(itemId); if (item == null) { //this probably shouldn't happen log.error("unable to retrive itemDataIfc for: " + publishedItemHash.get(itemId)); continue; } Iterator i = item.getItemMetaDataSet().iterator(); while (i.hasNext()) { ItemMetaDataIfc meta = (ItemMetaDataIfc) i.next(); if (meta.getLabel().equals(ItemMetaDataIfc.REQUIRE_ALL_OK)) { if (meta.getEntry().equals("true")) { NeededAllOk = true; break; } if (meta.getEntry().equals("false")) { NeededAllOk = false; break; } } } Long itemType = item.getTypeId(); autoScore = (double) 0; itemGrading.setAssessmentGradingId(data.getAssessmentGradingId()); //itemGrading.setSubmittedDate(new Date()); itemGrading.setAgentId(agent); itemGrading.setOverrideScore(Double.valueOf(0)); if (itemType == 5 && itemGrading.getAnswerText() != null) { String processedAnswerText = itemGrading.getAnswerText().replaceAll("\r", "").replaceAll("\n", ""); if (processedAnswerText.length() > 32000) { if (invalidSALengthList != null) { invalidSALengthList.add(item.getItemId()); } } } // note that totalItems & fibAnswersMap would be modified by the following method try { autoScore = getScoreByQuestionType(itemGrading, item, itemType, publishedItemTextHash, totalItems, fibEmiAnswersMap, emiScoresMap, publishedAnswerHash, regrade, calcQuestionAnswerSequence); } catch (FinFormatException e) { autoScore = 0d; if (invalidFINMap != null) { if (invalidFINMap.containsKey(itemId)) { ArrayList list = (ArrayList) invalidFINMap.get(itemId); list.add(itemGrading.getItemGradingId()); } else { ArrayList list = new ArrayList(); list.add(itemGrading.getItemGradingId()); invalidFINMap.put(itemId, list); } } } if ((TypeIfc.IMAGEMAP_QUESTION.equals(itemType)) && (NeededAllOk) && ((autoScore == -123456789) || !imageMapAllOk)) { autoScore = 0; imageMapAllOk = false; } log.debug("**!regrade, autoScore=" + autoScore); if (!(TypeIfc.MULTIPLE_CORRECT).equals(itemType) && !(TypeIfc.EXTENDED_MATCHING_ITEMS).equals(itemType)) totalItems.put(itemId, Double.valueOf(autoScore)); if (regrade && TypeIfc.AUDIO_RECORDING.equals(itemType)) itemGrading.setAttemptsRemaining(item.getTriesAllowed()); itemGrading.setAutoScore(Double.valueOf(autoScore)); } if ((invalidFINMap != null && invalidFINMap.size() > 0) || (invalidSALengthList != null && invalidSALengthList.size() > 0)) { return; } // Added persistToDB because if we don't save data to DB later, we shouldn't update the assessment // submittedDate either. The date should be sync in delivery bean and DB // This is for DeliveryBean.checkDataIntegrity() if (!regrade && persistToDB) { data.setSubmittedDate(new Date()); setIsLate(data, pub); } log.debug("****x3. 
" + (new Date()).getTime()); List<ItemGradingData> emiItemGradings = new ArrayList<ItemGradingData>(); // the following procedure ensure total score awarded per question is no less than 0 // this probably only applies to MCMR question type - daisyf iter = itemGradingSet.iterator(); //since the itr goes through each answer (multiple answers for a signle mc question), keep track //of its total score by itemId -> autoScore[]{user's score, total possible} Map<Long, Double[]> mcmcAllOrNothingCheck = new HashMap<Long, Double[]>(); Map<Long, Integer> countMcmcAllItemGradings = new HashMap<Long, Integer>(); //get item information to check if it's MCMS and Not Partial Credit Long itemType2 = -1l; String mcmsPartialCredit = ""; double itemScore = -1; while (iter.hasNext()) { ItemGradingData itemGrading = iter.next(); itemId = itemGrading.getPublishedItemId(); ItemDataIfc item = (ItemDataIfc) publishedItemHash.get(itemId); //SAM-1724 it's possible the item is not in the hash -DH if (item == null) { log.error("unable to retrive itemDataIfc for: " + publishedItemHash.get(itemId)); continue; } itemType2 = item.getTypeId(); //get item information to check if it's MCMS and Not Partial Credit mcmsPartialCredit = item.getItemMetaDataByLabel(ItemMetaDataIfc.MCMS_PARTIAL_CREDIT); itemScore = item.getScore(); //double autoScore = (double) 0; // this does not apply to EMI // just create a short-list and handle differently below if ((TypeIfc.EXTENDED_MATCHING_ITEMS).equals(itemType2)) { emiItemGradings.add(itemGrading); continue; } double eachItemScore = ((Double) totalItems.get(itemId)).doubleValue(); if ((eachItemScore < 0) && !((TypeIfc.MULTIPLE_CHOICE).equals(itemType2) || (TypeIfc.TRUE_FALSE).equals(itemType2) || (TypeIfc.MULTIPLE_CORRECT_SINGLE_SELECTION).equals(itemType2))) { itemGrading.setAutoScore(Double.valueOf(0)); } //keep track of MCMC answer's total score in order to check for all or nothing if (TypeIfc.MULTIPLE_CORRECT.equals(itemType2) && "false".equals(mcmsPartialCredit)) { Double accumulatedScore = itemGrading.getAutoScore(); if (mcmcAllOrNothingCheck.containsKey(itemId)) { Double[] accumulatedScoreArr = mcmcAllOrNothingCheck.get(itemId); accumulatedScore += accumulatedScoreArr[0]; } mcmcAllOrNothingCheck.put(itemId, new Double[] { accumulatedScore, item.getScore() }); int count = 0; if (countMcmcAllItemGradings.containsKey(itemId)) count = ((Integer) countMcmcAllItemGradings.get(itemId)).intValue(); countMcmcAllItemGradings.put(itemId, new Integer(++count)); } } log.debug("****x3.1 " + (new Date()).getTime()); // Loop 1: this procedure ensure total score awarded per EMI item // is correct // For emi's there are multiple gradings per item per question, // for the grading we only know scores after grading so we need // to reset the grading score here to the correct scores // this currently only applies to EMI question type if (emiItemGradings != null && !emiItemGradings.isEmpty()) { Map<Long, Map<Long, Map<Long, EMIScore>>> emiOrderedScoresMap = reorderEMIScoreMap(emiScoresMap); iter = emiItemGradings.iterator(); while (iter.hasNext()) { ItemGradingData itemGrading = iter.next(); //SAM-2016 check for Nullity if (itemGrading == null) { log.warn("Map contains null itemgrading!"); continue; } Map<Long, Map<Long, EMIScore>> innerMap = emiOrderedScoresMap .get(itemGrading.getPublishedItemId()); if (innerMap == null) { log.warn("Inner map is empty!"); continue; } Map<Long, EMIScore> scoreMap = innerMap.get(itemGrading.getPublishedItemTextId()); if (scoreMap == null) { log.warn("Score map is empty!"); 
continue; } EMIScore score = scoreMap.get(itemGrading.getPublishedAnswerId()); if (score == null) { //its possible! SAM-2016 log.warn("we can't find a score for answer: " + itemGrading.getPublishedAnswerId()); continue; } itemGrading.setAutoScore(emiOrderedScoresMap.get(itemGrading.getPublishedItemId()) .get(itemGrading.getPublishedItemTextId()) .get(itemGrading.getPublishedAnswerId()).effectiveScore); } } // if it's MCMS and Not Partial Credit and the score isn't 100% (totalAutoScoreCheck != itemScore), // that means the user didn't answer all of the correct answers only. // We need to set their score to 0 for all ItemGrading items for (Entry<Long, Double[]> entry : mcmcAllOrNothingCheck.entrySet()) { if (Double.compare(entry.getValue()[0], entry.getValue()[1]) != 0) { //reset all scores to 0 since the user didn't get all correct answers iter = itemGradingSet.iterator(); while (iter.hasNext()) { ItemGradingData itemGrading = iter.next(); Long itemId2 = entry.getKey(); if (itemGrading.getPublishedItemId().equals(itemId2)) { AnswerIfc answer = (AnswerIfc) publishedAnswerHash .get(itemGrading.getPublishedAnswerId()); if (answer == null) { itemGrading.setAutoScore(Double.valueOf(0)); log.error("unable to retrieve answerIfc for: " + itemId2); continue; } if (!countMcmcAllItemGradings.containsKey(itemId2)) { itemGrading.setAutoScore(Double.valueOf(0)); log.error("unable to retrieve itemGrading's counter for: " + itemId2); continue; } double discount = (Math.abs(answer.getDiscount().doubleValue()) * ((double) -1)); int count = ((Integer) countMcmcAllItemGradings.get(itemId2)).intValue(); double itemGrDisc = discount / count; itemGrading.setAutoScore(Double.valueOf(itemGrDisc)); } } } } log.debug("****x4. " + (new Date()).getTime()); // save#1: this itemGrading Set is a partial set of answers submitted. it contains new answers and // updated old answers and FIB answers ('cos we need the old answer to calculate the score for new // ones). we need to be cheap, we don't want to update record that hasn't been // changed. Yes, assessmentGrading's total score will be out of sync at this point, I am afraid. It // would be in sync again once the whole method is completed sucessfully. if (persistToDB) { saveOrUpdateAll(itemGradingSet); } log.debug("****x5. " + (new Date()).getTime()); // save#2: now, we need to get the full set so we can calculate the total score accumulate for the // whole assessment. Set fullItemGradingSet = getItemGradingSet(data.getAssessmentGradingId().toString()); double totalAutoScore = getTotalAutoScore(fullItemGradingSet); data.setTotalAutoScore(Double.valueOf(totalAutoScore)); //log.debug("**#1 total AutoScore"+totalAutoScore); if (Double.compare((totalAutoScore + data.getTotalOverrideScore().doubleValue()), new Double("0").doubleValue()) < 0) { data.setFinalScore(Double.valueOf("0")); } else { data.setFinalScore(Double.valueOf(totalAutoScore + data.getTotalOverrideScore().doubleValue())); } log.debug("****x6. " + (new Date()).getTime()); } catch (GradebookServiceException ge) { ge.printStackTrace(); throw ge; } catch (Exception e) { e.printStackTrace(); throw new RuntimeException(e); } // save#3: itemGradingSet has been saved above so just need to update assessmentGrading // therefore setItemGradingSet as empty first - daisyf // however, if we do not persit to DB, we want to keep itemGradingSet with data for later use // Because if itemGradingSet is not saved to DB, we cannot go to DB to get it. We have to // get it through data. 
if (persistToDB) { data.setItemGradingSet(new HashSet()); saveOrUpdateAssessmentGrading(data); log.debug("****x7. " + (new Date()).getTime()); if (!regrade) { notifyGradebookByScoringType(data, pub); } } log.debug("****x8. " + (new Date()).getTime()); // I am not quite sure what the following code is doing... I modified this based on my assumption: // If this happens dring regrade, we don't want to clean these data up // We only want to clean them out in delivery if (!regrade && Boolean.TRUE.equals(data.getForGrade())) { // remove the assessmentGradingData created during gradiing (by updatding total score page) removeUnsubmittedAssessmentGradingData(data); } }
From source file: org.unitime.timetable.test.StudentSectioningTest.java
private static void testSectioning(Element studentElement, Element response, Session session) { try {//from w ww . ja v a 2 s .c om System.out.print("Request:"); new XMLWriter(System.out, OutputFormat.createPrettyPrint()).write(studentElement); } catch (Exception e) { } Student student = new Student(Long.parseLong(studentElement.attributeValue("key"))); sLog.info(" loading student " + student.getId()); String courseNumbersMustBeUnique = ApplicationProperties.getProperty("tmtbl.courseNumber.unique", "true"); StudentSctBBTest sbt = null; boolean commit = false; Vector messages = new Vector(); if (studentElement.element("retrieveCourseRequests") != null) { loadStudent(session, student, messages); sbt = new StudentSctBBTest(student); for (Iterator e = student.getRequests().iterator(); e.hasNext();) { Request request = (Request) e.next(); if (request.getInitialAssignment() != null) request.assign(0, request.getInitialAssignment()); } for (Iterator e = student.getRequests().iterator(); e.hasNext();) { Request request = (Request) e.next(); if (request instanceof FreeTimeRequest) { Enrollment enrollment = (Enrollment) request.values().get(0); if (sbt.conflictValues(enrollment).isEmpty()) request.assign(0, enrollment); } } } Element courseRequestsElement = studentElement.element("updateCourseRequests"); if (courseRequestsElement == null) { sLog.warn(" No course requests for student " + student.getId()); } else { long reqId = 0; int priority = 0; commit = "true".equals(courseRequestsElement.attributeValue("commit")); for (Iterator i = courseRequestsElement.elementIterator(); i.hasNext();) { Element requestElement = (Element) i.next(); boolean alternative = "true".equals(requestElement.attributeValue("alternative")); if ("freeTime".equals(requestElement.getName())) { String days = requestElement.attributeValue("days"); String startTime = requestElement.attributeValue("startTime"); String length = requestElement.attributeValue("length"); String endTime = requestElement.attributeValue("endTime"); FreeTimeRequest ftRequest = new FreeTimeRequest(reqId++, priority++, alternative, student, makeTime(session.getDefaultDatePattern(), days, startTime, endTime, length)); sLog.info(" added " + ftRequest); } else if ("courseOffering".equals(requestElement.getName())) { String subjectArea = requestElement.attributeValue("subjectArea"); String courseNumber = requestElement.attributeValue("courseNumber"); boolean waitlist = "true".equals(requestElement.attributeValue("waitlist", "false")); Long timeStamp = (requestElement.attributeValue("timeStamp") == null ? 
null : Long.parseLong(requestElement.attributeValue("timeStamp"))); CourseOffering co = null; if (courseNumbersMustBeUnique.equalsIgnoreCase("true")) { co = CourseOffering.findBySessionSubjAreaAbbvCourseNbr(session.getUniqueId(), subjectArea, courseNumber); } else { String title = requestElement.attributeValue("title"); co = CourseOffering.findBySessionSubjAreaAbbvCourseNbrTitle(session.getUniqueId(), subjectArea, courseNumber, title); } if (co == null) { sLog.warn(" Course " + subjectArea + " " + courseNumber + " not found."); continue; } Vector courses = new Vector(); courses.add(loadCourse(co, student.getId())); for (Iterator j = requestElement.elementIterator("alternative"); j.hasNext();) { Element altElement = (Element) j.next(); String altSubjectArea = altElement.attributeValue("subjectArea"); String altCourseNumber = altElement.attributeValue("courseNumber"); CourseOffering aco = null; if (courseNumbersMustBeUnique.equalsIgnoreCase("true")) { aco = CourseOffering.findBySessionSubjAreaAbbvCourseNbr(session.getUniqueId(), altSubjectArea, altCourseNumber); } else { String altTitle = altElement.attributeValue("title"); aco = CourseOffering.findBySessionSubjAreaAbbvCourseNbrTitle(session.getUniqueId(), altSubjectArea, altCourseNumber, altTitle); } if (aco != null) courses.add(loadCourse(aco, student.getId())); } CourseRequest cRequest = new CourseRequest(reqId++, priority++, alternative, student, courses, waitlist, timeStamp); cRequest.values(); sLog.info(" added " + cRequest); } } Element requestScheduleElement = studentElement.element("requestSchedule"); if (requestScheduleElement != null) { for (Iterator i = requestScheduleElement.elementIterator("courseOffering"); i.hasNext();) { Element courseOfferingElement = (Element) i.next(); String subjectArea = courseOfferingElement.attributeValue("subjectArea"); String courseNumber = courseOfferingElement.attributeValue("courseNumber"); CourseOffering co = null; if (courseNumbersMustBeUnique.equalsIgnoreCase("true")) { co = CourseOffering.findBySessionSubjAreaAbbvCourseNbr(session.getUniqueId(), subjectArea, courseNumber); } else { String title = courseOfferingElement.attributeValue("title"); co = CourseOffering.findBySessionSubjAreaAbbvCourseNbrTitle(session.getUniqueId(), subjectArea, courseNumber, title); } if (co == null) { sLog.warn(" Course " + subjectArea + " " + courseNumber + " not found."); continue; } for (Iterator e = student.getRequests().iterator(); e.hasNext();) { Request request = (Request) e.next(); if (request instanceof CourseRequest) { CourseRequest courseRequest = (CourseRequest) request; Course course = courseRequest.getCourse(co.getUniqueId().longValue()); Config config = null; if (course == null) continue; Set assignedSections = new HashSet(); int nrClasses = 0; for (Iterator j = courseOfferingElement.elementIterator("class"); j .hasNext(); nrClasses++) { Element classEl = (Element) j.next(); String assignmentId = classEl.attributeValue("assignmentId"); Section section = (assignmentId == null ? 
null : course.getOffering().getSection(Long.parseLong(assignmentId))); if (section != null) { assignedSections.add(section); if (config == null) config = section.getSubpart().getConfig(); } for (Iterator k = classEl.elementIterator("choice"); k.hasNext();) { Element choiceEl = (Element) k.next(); Choice choice = new Choice(course.getOffering(), choiceEl.attributeValue("id")); if ("select".equals(choiceEl.attributeValue("selection"))) { courseRequest.getSelectedChoices().add(choice); sLog.info(" add selection " + choice); } else { courseRequest.getWaitlistedChoices().add(choice); sLog.info(" add waitlist " + choice); } } } if (nrClasses == assignedSections.size()) { courseRequest .setInitialAssignment(new Enrollment(request, 0, config, assignedSections)); sLog.info(" initial assignment " + courseRequest.getInitialAssignment()); } } } } } else { sLog.warn(" No schedule requests for student " + student.getId()); } sLog.info(" sectioning student " + student.getId()); sbt = new StudentSctBBTest(student); Model model = sbt.getSolution().getModel(); messages.addAll(sbt.getMessages()); sLog.info(" info: " + model.getInfo()); if (commit) saveStudent(session, student, messages); } Element studentResponseElement = response.addElement("student"); studentResponseElement.addAttribute("key", String.valueOf(student.getId())); Element ackResponseElement = studentResponseElement.addElement("acknowledgement"); ackResponseElement.addAttribute("result", "ok"); Element courseReqResponseElement = studentResponseElement.addElement("courseRequests"); for (Iterator e = messages.iterator(); e.hasNext();) { StudentSctBBTest.Message message = (StudentSctBBTest.Message) e.next(); ackResponseElement.addElement("message").addAttribute("type", message.getLevelString()) .setText(message.getMessage()); } for (Iterator e = student.getRequests().iterator(); e.hasNext();) { Request request = (Request) e.next(); Element reqElement = null; if (request instanceof FreeTimeRequest) { FreeTimeRequest ftRequest = (FreeTimeRequest) request; reqElement = courseReqResponseElement.addElement("freeTime"); reqElement.addAttribute("days", dayCode2days(ftRequest.getTime().getDayCode())); reqElement.addAttribute("startTime", startSlot2startTime(ftRequest.getTime().getStartSlot())); reqElement.addAttribute("endTime", timeLocation2endTime(ftRequest.getTime())); reqElement.addAttribute("length", String.valueOf(Constants.SLOT_LENGTH_MIN * ftRequest.getTime().getLength())); sLog.info(" added " + ftRequest); } else { CourseRequest courseRequest = (CourseRequest) request; reqElement = courseReqResponseElement.addElement("courseOffering"); for (Iterator f = courseRequest.getCourses().iterator(); f.hasNext();) { Course course = (Course) f.next(); Element element = (reqElement.attribute("subjectArea") == null ? reqElement : reqElement.addElement("alternative")); element.addAttribute("subjectArea", course.getSubjectArea()); element.addAttribute("courseNumber", course.getCourseNumber()); CourseOffering co = CourseOffering.findByUniqueId(course.getId()); element.addAttribute("title", (co.getTitle() != null ? co.getTitle() : "")); } reqElement.addAttribute("waitlist", courseRequest.isWaitlist() ? 
"true" : "false"); if (courseRequest.getTimeStamp() != null) reqElement.addAttribute("timeStamp", courseRequest.getTimeStamp().toString()); sLog.info(" added " + courseRequest); } if (request.isAlternative()) reqElement.addAttribute("alternative", "true"); } Comparator choiceComparator = new Comparator() { public int compare(Object o1, Object o2) { Choice c1 = (Choice) o1; Choice c2 = (Choice) o2; if (c1.getTime() == null) { if (c2.getTime() != null) return -1; } else if (c2.getTime() == null) return 1; if (c1.getTime() != null) { int cmp = -Double.compare(c1.getTime().getDayCode(), c2.getTime().getDayCode()); if (cmp != 0) return cmp; cmp = Double.compare(c1.getTime().getStartSlot(), c2.getTime().getStartSlot()); if (cmp != 0) return cmp; cmp = c1.getTime().getDatePatternName().compareTo(c2.getTime().getDatePatternName()); if (cmp != 0) return cmp; } if (c1.getInstructorNames() == null) { if (c2.getInstructorNames() != null) return -1; } else if (c2.getInstructorNames() == null) return 1; if (c1.getInstructorNames() != null) { int cmp = c1.getInstructorNames().compareTo(c2.getInstructorNames()); if (cmp != 0) return cmp; } return c1.getId().compareTo(c2.getId()); } }; boolean generateRandomAvailability = (student.getId() < 0); Element scheduleResponseElement = studentResponseElement.addElement("schedule"); scheduleResponseElement.addAttribute("type", (commit ? "actual" : "proposed")); for (Iterator e = student.getRequests().iterator(); e.hasNext();) { Request request = (Request) e.next(); if (request.getAssignment() == null) { sLog.info(" request " + request + " has no assignment"); if (request instanceof CourseRequest && ((CourseRequest) request).isWaitlist() && request.getStudent().canAssign(request)) { Element courseOfferingElement = scheduleResponseElement.addElement("courseOffering"); Course course = (Course) ((CourseRequest) request).getCourses().get(0); courseOfferingElement.addAttribute("subjectArea", course.getSubjectArea()); courseOfferingElement.addAttribute("courseNumber", course.getCourseNumber()); CourseOffering co = CourseOffering.findByUniqueId(course.getId()); courseOfferingElement.addAttribute("title", co.getTitle()); courseOfferingElement.addAttribute("waitlist", ((CourseRequest) request).isWaitlist() ? 
"true" : "false"); if (((CourseRequest) request).getTimeStamp() != null) courseOfferingElement.addAttribute("timeStamp", ((CourseRequest) request).getTimeStamp().toString()); } continue; } if (request instanceof FreeTimeRequest) { FreeTimeRequest ftRequest = (FreeTimeRequest) request; Element ftElement = scheduleResponseElement.addElement("freeTime"); ftElement.addAttribute("days", dayCode2days(ftRequest.getTime().getDayCode())); ftElement.addAttribute("startTime", startSlot2startTime(ftRequest.getTime().getStartSlot())); ftElement.addAttribute("endTime", timeLocation2endTime(ftRequest.getTime())); ftElement.addAttribute("length", String.valueOf(Constants.SLOT_LENGTH_MIN * ftRequest.getTime().getLength())); if (ftRequest.getTime() != null) ftElement.addAttribute("time", ftRequest.getTime().getDayHeader() + " " + ftRequest.getTime().getStartTimeHeader() + " - " + ftRequest.getTime().getEndTimeHeader()); else ftElement.addAttribute("time", "Arr Hrs"); } else { CourseRequest courseRequest = (CourseRequest) request; Element courseOfferingElement = scheduleResponseElement.addElement("courseOffering"); Enrollment enrollment = (Enrollment) request.getAssignment(); Set unusedInstructionalTypes = null; Offering offering = null; HashSet availableChoices = null; Vector assignments = new Vector(enrollment.getAssignments()); Collections.sort(assignments, new Comparator() { public int compare(Object o1, Object o2) { Section s1 = (Section) o1; Section s2 = (Section) o2; return s1.getSubpart().compareTo(s2.getSubpart()); } }); for (Iterator i = assignments.iterator(); i.hasNext();) { Section section = (Section) i.next(); if (courseOfferingElement.attribute("subjectArea") == null) { Course course = enrollment.getCourse(); courseOfferingElement.addAttribute("subjectArea", course.getSubjectArea()); courseOfferingElement.addAttribute("courseNumber", course.getCourseNumber()); CourseOffering co = CourseOffering.findByUniqueId(course.getId()); courseOfferingElement.addAttribute("title", co.getTitle()); } if (offering == null) { offering = section.getSubpart().getConfig().getOffering(); if (generateRandomAvailability) { availableChoices = generateAvailableChoices(offering, new Random(13031978l), 0.75); } else { availableChoices = new HashSet(); for (Iterator j = courseRequest.getAvaiableEnrollmentsSkipSameTime().iterator(); j .hasNext();) { Enrollment enr = (Enrollment) j.next(); for (Iterator k = enr.getAssignments().iterator(); k.hasNext();) { Section s = (Section) k.next(); if (s.getLimit() > 0 && s.getPenalty() <= sAvailableThreshold) availableChoices.add(s.getChoice()); } } } } if (unusedInstructionalTypes == null) unusedInstructionalTypes = section.getSubpart().getConfig().getOffering() .getInstructionalTypes(); unusedInstructionalTypes.remove(section.getSubpart().getInstructionalType()); Element classElement = courseOfferingElement.addElement("class"); classElement.addAttribute("id", section.getSubpart().getInstructionalType()); classElement.addAttribute("assignmentId", String.valueOf(section.getId())); if (section.getSubpart().getParent() != null) classElement.addAttribute("parent", section.getSubpart().getParent().getInstructionalType()); classElement.addAttribute("name", section.getSubpart().getName()); if (section.getTime() != null) { classElement.addAttribute("days", dayCode2days(section.getTime().getDayCode())); classElement.addAttribute("startTime", startSlot2startTime(section.getTime().getStartSlot())); classElement.addAttribute("endTime", timeLocation2endTime(section.getTime())); 
//classElement.addAttribute("length", String.valueOf(Constants.SLOT_LENGTH_MIN*section.getTime().getLength())); if (section.getTime().getDatePatternName() != null) classElement.addAttribute("date", section.getTime().getDatePatternName()); classElement.addAttribute("time", section.getTime().getDayHeader() + " " + section.getTime().getStartTimeHeader() + " - " + section.getTime().getEndTimeHeader()); } else classElement.addAttribute("time", "Arr Hrs"); if (section.getNrRooms() > 0) { String location = ""; for (Iterator f = section.getRooms().iterator(); f.hasNext();) { RoomLocation rl = (RoomLocation) f.next(); location += rl.getName(); if (f.hasNext()) location += ","; } classElement.addAttribute("location", location); } if (section.getChoice().getInstructorNames() != null) classElement.addAttribute("instructor", section.getChoice().getInstructorNames()); Vector choices = new Vector(section.getSubpart().getConfig().getOffering() .getChoices(section.getSubpart().getInstructionalType())); Collections.sort(choices, choiceComparator); for (Iterator f = choices.iterator(); f.hasNext();) { Choice choice = (Choice) f.next(); Element choiceEl = classElement.addElement("choice"); choiceEl.addAttribute("id", choice.getId()); choiceEl.addAttribute("available", (availableChoices == null ? "true" : availableChoices.contains(choice) ? "true" : "false")); if (choice.getTime() != null) { choiceEl.addAttribute("days", dayCode2days(choice.getTime().getDayCode())); choiceEl.addAttribute("startTime", startSlot2startTime(choice.getTime().getStartSlot())); choiceEl.addAttribute("endTime", timeLocation2endTime(choice.getTime())); if (choice.getTime().getDatePatternName() != null) choiceEl.addAttribute("date", choice.getTime().getDatePatternName()); choiceEl.addAttribute("time", choice.getTime().getDayHeader() + " " + choice.getTime().getStartTimeHeader() + " - " + choice.getTime().getEndTimeHeader()); } else choiceEl.addAttribute("time", "Arr Hrs"); if (choice.equals(section.getChoice())) choiceEl.addAttribute("available", "true"); if (courseRequest.getSelectedChoices().isEmpty() && choice.equals(section.getChoice())) { choiceEl.addAttribute("selection", "select"); } else if (courseRequest.getSelectedChoices().contains(choice)) { choiceEl.addAttribute("selection", "select"); if (generateRandomAvailability) choiceEl.addAttribute("available", "true"); } else if (courseRequest.getWaitlistedChoices().contains(choice)) { choiceEl.addAttribute("selection", "wait"); if (generateRandomAvailability) choiceEl.addAttribute("available", "false"); } if (choice.getInstructorNames() != null) choiceEl.addAttribute("instructor", choice.getInstructorNames()); exportDependencies(choiceEl, choice, choice.getParentSections()); } } if (unusedInstructionalTypes != null) { for (Iterator i = unusedInstructionalTypes.iterator(); i.hasNext();) { String unusedInstructionalType = (String) i.next(); Element classElement = courseOfferingElement.addElement("class"); classElement.addAttribute("id", unusedInstructionalType); classElement.addAttribute("name", ((Subpart) offering.getSubparts(unusedInstructionalType).iterator().next()) .getName()); Vector choices = new Vector(offering.getChoices(unusedInstructionalType)); Collections.sort(choices, choiceComparator); for (Iterator f = choices.iterator(); f.hasNext();) { Choice choice = (Choice) f.next(); Element choiceEl = classElement.addElement("choice"); choiceEl.addAttribute("id", choice.getId()); choiceEl.addAttribute("available", (availableChoices == null ? 
"true" : availableChoices.contains(choice) ? "true" : "false")); if (choice.getTime() != null) { choiceEl.addAttribute("days", dayCode2days(choice.getTime().getDayCode())); choiceEl.addAttribute("startTime", startSlot2startTime(choice.getTime().getStartSlot())); choiceEl.addAttribute("endTime", timeLocation2endTime(choice.getTime())); if (choice.getTime().getDatePatternName() != null) choiceEl.addAttribute("date", choice.getTime().getDatePatternName()); choiceEl.addAttribute("time", choice.getTime().getDayHeader() + " " + choice.getTime().getStartTimeHeader() + " - " + choice.getTime().getEndTimeHeader()); } else choiceEl.addAttribute("time", "Arr Hrs"); if (courseRequest.getWaitlistedChoices().contains(choice)) choiceEl.addAttribute("selection", "wait"); if (choice.getInstructorNames() != null) choiceEl.addAttribute("instructor", choice.getInstructorNames()); exportDependencies(choiceEl, choice, choice.getParentSections()); } } } } sLog.info(" added " + request.getAssignment()); } /* try { System.out.print("Response:"); new XMLWriter(System.out,OutputFormat.createPrettyPrint()).write(studentResponseElement); } catch (Exception e) {} */ }
From source file: org.nd4j.linalg.factory.Nd4j.java
/**
 * Sort an ndarray along a particular dimension
 *
 * @param ndarray   the ndarray to sort
 * @param dimension the dimension to sort
 * @return an array with indices and the sorted ndarray
 */
public static INDArray[] sortWithIndices(IComplexNDArray ndarray, int dimension, boolean ascending) {
    INDArray indices = Nd4j.create(ndarray.shape());
    INDArray[] ret = new INDArray[2];

    for (int i = 0; i < ndarray.vectorsAlongDimension(dimension); i++) {
        IComplexNDArray vec = ndarray.vectorAlongDimension(i, dimension);
        INDArray indexVector = indices.vectorAlongDimension(i, dimension);
        final IComplexNumber[] data = new IComplexNumber[vec.length()];
        final Double[] index = new Double[vec.length()];

        for (int j = 0; j < vec.length(); j++) {
            data[j] = vec.getComplex(j);
            index[j] = (double) j;
        }

        if (ascending)
            Arrays.sort(index, new Comparator<Double>() {
                @Override
                public int compare(Double o1, Double o2) {
                    int idx1 = (int) o1.doubleValue();
                    int idx2 = (int) o2.doubleValue();
                    return Double.compare(data[idx1].absoluteValue().doubleValue(),
                            data[idx2].absoluteValue().doubleValue());
                }
            });
        else
            Arrays.sort(index, new Comparator<Double>() {
                @Override
                public int compare(Double o1, Double o2) {
                    int idx1 = (int) o1.doubleValue();
                    int idx2 = (int) o2.doubleValue();
                    return -Double.compare(data[idx1].absoluteValue().doubleValue(),
                            data[idx2].absoluteValue().doubleValue());
                }
            });

        for (int j = 0; j < vec.length(); j++) {
            vec.putScalar(j, data[(int) index[j].doubleValue()]);
            indexVector.putScalar(j, index[j]);
        }
    }
    ret[0] = indices;
    ret[1] = ndarray;
    return ret;
}
From source file: org.nd4j.linalg.factory.Nd4j.java
/**
 * Sort an ndarray along a particular dimension
 *
 * @param ndarray   the ndarray to sort
 * @param dimension the dimension to sort
 * @return the indices and the sorted ndarray
 */
public static INDArray[] sortWithIndices(INDArray ndarray, int dimension, boolean ascending) {
    INDArray indices = Nd4j.create(ndarray.shape());
    INDArray[] ret = new INDArray[2];

    for (int i = 0; i < ndarray.vectorsAlongDimension(dimension); i++) {
        INDArray vec = ndarray.vectorAlongDimension(i, dimension);
        INDArray indexVector = indices.vectorAlongDimension(i, dimension);
        final Double[] data = new Double[vec.length()];
        final Double[] index = new Double[vec.length()];

        for (int j = 0; j < vec.length(); j++) {
            data[j] = vec.getDouble(j);
            index[j] = (double) j;
        }

        /**
         * Inject a comparator that sorts indices relative to
         * the actual values in the data.
         * This allows us to retain the indices
         * and how they were rearranged.
         */
        Arrays.sort(index, new Comparator<Double>() {
            @Override
            public int compare(Double o1, Double o2) {
                int o = (int) o1.doubleValue();
                int oo2 = (int) o2.doubleValue();
                return Double.compare(data[o], data[oo2]);
            }
        });

        if (ascending)
            for (int j = 0; j < vec.length(); j++) {
                vec.putScalar(j, data[(int) index[j].doubleValue()]);
                indexVector.putScalar(j, index[j]);
            }
        else {
            int count = data.length - 1;
            for (int j = 0; j < vec.length(); j++) {
                int currCount2 = count;
                count--;
                vec.putScalar(j, data[(int) index[currCount2].doubleValue()]);
                indexVector.putScalar(j, index[currCount2]);
            }
        }
    }
    ret[0] = indices;
    ret[1] = ndarray;
    return ret;
}