List of usage examples for java.util.HashMap.clear(), collected from open-source projects.
public void clear()
Removes all of the mappings from the map, leaving it empty. Most of the examples below use clear() to reuse a single map instance across loop iterations instead of allocating a new HashMap each time.
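Before the project examples, a minimal sketch of the call itself. The map contents and the batch loop are illustrative only and do not come from any of the projects below.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ClearExample {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();
        // One map instance is reused for every batch; clear() removes all
        // mappings between batches instead of allocating a new HashMap.
        for (List<String> batch : List.of(List.of("a", "b", "a"), List.of("c"))) {
            counts.clear();                          // size() is now 0
            for (String key : batch) {
                counts.merge(key, 1, Integer::sum);  // count occurrences in this batch
            }
            System.out.println(batch + " -> " + counts);
        }
    }
}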
From source file: us.deathmarine.fishchecker.FishChecker.java
A Bukkit command handler that queries the Fishbans API for a player's bans; one HashMap<String, Object> is reused for every ban service (MCBans, MCBouncer, MCBlockIt, Minebans, Glizer), and map.clear() empties it after each service's details are printed.
@Override public boolean onCommand(final CommandSender sender, Command command, String label, final String[] args) { if (!sender.hasPermission(command.getPermission())) { sender.sendMessage(ChatColor.RED + "You do not have the required permissions."); return true; }//w ww. j a v a 2s.co m if (args.length < 1) return false; sender.sendMessage(ChatColor.GRAY + "Checking Fishbans for information on " + ChatColor.DARK_RED + args[0] + ChatColor.GRAY + " !"); this.getServer().getScheduler().runTaskAsynchronously(this, new Runnable() { @Override public void run() { try { HashMap<String, Object> map = new HashMap<String, Object>(); URL url = new URL("http://api.fishbans.com/bans/" + args[0].toLowerCase() + "/"); String line; StringBuilder builder = new StringBuilder(); BufferedReader reader = new BufferedReader(new InputStreamReader(url.openStream())); while ((line = reader.readLine()) != null) { builder.append(line); } JSONParser parser = new JSONParser(); Object obj = parser.parse(builder.toString()); reader.close(); JSONObject jsonObject = (JSONObject) obj; JSONObject bans = (JSONObject) jsonObject.get("bans"); if (bans == null) { sender.sendMessage("Player Does Not Exist."); return; } JSONObject service = (JSONObject) bans.get("service"); if (service == null) { sender.sendMessage("Player Does Not Exist."); return; } JSONObject mcbans = (JSONObject) service.get("mcbans"); if (mcbans == null) { sender.sendMessage("Mcbans: Player Not Found."); return; } long mcbansAmt = 0L; if (mcbans.get("bans") != null) mcbansAmt = getValue(mcbans.get("bans")); if (mcbansAmt > 0L) { JSONObject mcbansInfo = castToJSON(mcbans.get("ban_info")); sender.sendMessage(ChatColor.RED + "Mcbans: " + String.valueOf(mcbansAmt)); toJavaMap(mcbansInfo, map); if (mcbansInfo != null) outputHashMap(map, sender); map.clear(); } else { sender.sendMessage(ChatColor.GREEN + "Mcbans: " + String.valueOf(mcbansAmt)); } JSONObject mcbouncer = (JSONObject) service.get("mcbouncer"); if (mcbouncer == null) { sender.sendMessage("McBouncer: Player Not Found."); return; } long mcbouncerAmt = 0L; if (mcbouncer.get("bans") != null) mcbouncerAmt = getValue(mcbouncer.get("bans")); if (mcbouncerAmt > 0L) { JSONObject mcbouncerInfo = castToJSON(mcbouncer.get("ban_info")); sender.sendMessage(ChatColor.RED + "Mcbouncer: " + String.valueOf(mcbouncerAmt)); toJavaMap(mcbouncerInfo, map); if (mcbouncerInfo != null) outputHashMap(map, sender); map.clear(); } else { sender.sendMessage(ChatColor.GREEN + "Mcbouncer: " + String.valueOf(mcbouncerAmt)); } JSONObject mcblockit = (JSONObject) service.get("mcblockit"); if (mcblockit == null) { sender.sendMessage("McBlockit: Player Not Found."); return; } long mcblockitAmt = 0L; if (mcblockit.get("bans") != null) mcblockitAmt = getValue(mcblockit.get("bans")); if (mcblockitAmt > 0L) { JSONObject mcblockitInfo = castToJSON(mcblockit.get("ban_info")); sender.sendMessage(ChatColor.RED + "McBlockit: " + String.valueOf(mcblockitAmt)); toJavaMap(mcblockitInfo, map); if (mcblockitInfo != null) outputHashMap(map, sender); map.clear(); } else { sender.sendMessage(ChatColor.GREEN + "McBlockit: " + String.valueOf(mcblockitAmt)); } JSONObject minebans = (JSONObject) service.get("minebans"); if (minebans == null) { sender.sendMessage("Minebans: Player Not Found."); return; } long minebansAmt = 0L; if (minebans.get("bans") != null) minebansAmt = getValue(minebans.get("bans")); if (minebansAmt > 0L) { JSONObject minebansInfo = castToJSON(minebans.get("ban_info")); sender.sendMessage(ChatColor.RED + "Minebans: " + 
String.valueOf(minebansAmt)); toJavaMap(minebansInfo, map); if (minebansInfo != null) outputHashMap(map, sender); map.clear(); } else { sender.sendMessage(ChatColor.GREEN + "Minebans: " + String.valueOf(minebansAmt)); } JSONObject glizer = (JSONObject) service.get("glizer"); if (glizer == null) { sender.sendMessage("Glizer: Player Not Found."); return; } long glizerAmt = 0L; if (glizer.get("bans") != null) glizerAmt = getValue(glizer.get("bans")); if (glizerAmt > 0L) { JSONObject glizerInfo = castToJSON(glizer.get("ban_info")); sender.sendMessage(ChatColor.RED + "Glizer: " + String.valueOf(glizerAmt)); toJavaMap(glizerInfo, map); if (glizerInfo != null) outputHashMap(map, sender); map.clear(); } else { sender.sendMessage(ChatColor.GREEN + "Glizer: " + String.valueOf(glizerAmt)); } long sum = mcbansAmt + mcbouncerAmt + mcblockitAmt + minebansAmt + glizerAmt; String risk = ChatColor.GREEN + "Low"; if (sum > 0L) { risk = ChatColor.YELLOW + "Medium"; if (sum >= 5 && sum < 15) risk = ChatColor.RED + "High"; if (sum > 15) risk = ChatColor.DARK_RED + "Extreme"; } sender.sendMessage(ChatColor.GRAY + "Player Risk is status: " + risk); sender.sendMessage(ChatColor.GREEN + "See " + ChatColor.GRAY + "http://fishbans.com/u/" + args[0].toLowerCase() + "/"); } catch (Exception e) { e.printStackTrace(); } } }); return true; }
From source file: org.openscience.cdk.applications.taverna.weka.regression.EvaluateRegressionResultsAsPDFActivity.java
A CDK-Taverna activity that evaluates Weka regression models and writes the results to PDF charts; calcClassMap.clear() resets the UUID-to-predicted-value map before each model is evaluated.
@Override public void work() throws Exception { // Get input//from w w w . j a v a 2 s . c o m String[] options = ((String) this.getConfiguration() .getAdditionalProperty(CDKTavernaConstants.PROPERTY_SCATTER_PLOT_OPTIONS)).split(";"); List<File> modelFiles = this.getInputAsFileList(this.INPUT_PORTS[0]); List<Instances> trainDatasets = this.getInputAsList(this.INPUT_PORTS[1], Instances.class); List<Instances> testDatasets = null; if (options[0].equals("" + TEST_TRAININGSET_PORT)) { testDatasets = this.getInputAsList(this.INPUT_PORTS[2], Instances.class); } else { testDatasets = null; } String directory = modelFiles.get(0).getParent(); // Do work ArrayList<String> resultFiles = new ArrayList<String>(); HashMap<UUID, Double> orgClassMap = new HashMap<UUID, Double>(); HashMap<UUID, Double> calcClassMap = new HashMap<UUID, Double>(); WekaTools tools = new WekaTools(); ChartTool chartTool = new ChartTool(); List<Object> rmseCharts = new ArrayList<Object>(); List<Double> trainMeanRMSE = new ArrayList<Double>(); List<Double> testMeanRMSE = new ArrayList<Double>(); List<Double> cvMeanRMSE = new ArrayList<Double>(); DefaultCategoryDataset[] ratioRMSESet = new DefaultCategoryDataset[trainDatasets.size()]; for (int i = 0; i < trainDatasets.size(); i++) { ratioRMSESet[i] = new DefaultCategoryDataset(); } List<Double> trainingSetRatios = null; int fileIDX = 1; while (!modelFiles.isEmpty()) { trainingSetRatios = new ArrayList<Double>(); List<Double> trainRMSE = new ArrayList<Double>(); HashSet<Integer> trainSkippedRMSE = new HashSet<Integer>(); List<Double> testRMSE = new ArrayList<Double>(); HashSet<Integer> testSkippedRMSE = new HashSet<Integer>(); List<Double> cvRMSE = new ArrayList<Double>(); HashSet<Integer> cvSkippedRMSE = new HashSet<Integer>(); List<Object> chartsObjects = new LinkedList<Object>(); File modelFile = null; Classifier classifier = null; String name = ""; for (int j = 0; j < trainDatasets.size(); j++) { LinkedList<Double> predictedValues = new LinkedList<Double>(); LinkedList<Double> orgValues = new LinkedList<Double>(); LinkedList<Double[]> yResidueValues = new LinkedList<Double[]>(); LinkedList<String> yResidueNames = new LinkedList<String>(); if (modelFiles.isEmpty()) { break; } calcClassMap.clear(); modelFile = modelFiles.remove(0); classifier = (Classifier) SerializationHelper.read(modelFile.getPath()); Instances testset = null; if (testDatasets != null) { testset = testDatasets.get(j); } name = classifier.getClass().getSimpleName(); String sum = "Method: " + name + " " + tools.getOptionsFromFile(modelFile, name) + "\n\n"; // Produce training set data Instances trainset = trainDatasets.get(j); Instances trainUUIDSet = Filter.useFilter(trainset, tools.getIDGetter(trainset)); trainset = Filter.useFilter(trainset, tools.getIDRemover(trainset)); double trainingSetRatio = 1.0; if (testset != null) { trainingSetRatio = trainset.numInstances() / (double) (trainset.numInstances() + testset.numInstances()); } trainingSetRatios.add(trainingSetRatio * 100); // Predict for (int k = 0; k < trainset.numInstances(); k++) { UUID uuid = UUID.fromString(trainUUIDSet.instance(k).stringValue(0)); orgClassMap.put(uuid, trainset.instance(k).classValue()); calcClassMap.put(uuid, classifier.classifyInstance(trainset.instance(k))); } // Evaluate Evaluation trainEval = new Evaluation(trainset); trainEval.evaluateModel(classifier, trainset); // Chart data DefaultXYDataset xyDataSet = new DefaultXYDataset(); String trainSeries = "Training Set (RMSE: " + String.format("%.2f", 
trainEval.rootMeanSquaredError()) + ")"; XYSeries series = new XYSeries(trainSeries); Double[] yTrainResidues = new Double[trainUUIDSet.numInstances()]; Double[] orgTrain = new Double[trainUUIDSet.numInstances()]; Double[] calc = new Double[trainUUIDSet.numInstances()]; for (int k = 0; k < trainUUIDSet.numInstances(); k++) { UUID uuid = UUID.fromString(trainUUIDSet.instance(k).stringValue(0)); orgTrain[k] = orgClassMap.get(uuid); calc[k] = calcClassMap.get(uuid); if (calc[k] != null && orgTrain[k] != null) { series.add(orgTrain[k].doubleValue(), calc[k]); yTrainResidues[k] = calc[k].doubleValue() - orgTrain[k].doubleValue(); } else { ErrorLogger.getInstance().writeError("Can't find value for UUID: " + uuid.toString(), this.getActivityName()); throw new CDKTavernaException(this.getActivityName(), "Can't find value for UUID: " + uuid.toString()); } } orgValues.addAll(Arrays.asList(orgTrain)); predictedValues.addAll(Arrays.asList(calc)); CollectionUtilities.sortTwoArrays(orgTrain, yTrainResidues); yResidueValues.add(yTrainResidues); yResidueNames.add(trainSeries); xyDataSet.addSeries(trainSeries, series.toArray()); // Summary sum += "Training Set:\n"; if (trainEval.rootRelativeSquaredError() > 300) { trainSkippedRMSE.add(j); } trainRMSE.add(trainEval.rootMeanSquaredError()); sum += trainEval.toSummaryString(true); // Produce test set data if (testset != null) { Instances testUUIDSet = Filter.useFilter(testset, tools.getIDGetter(testset)); testset = Filter.useFilter(testset, tools.getIDRemover(testset)); // Predict for (int k = 0; k < testset.numInstances(); k++) { UUID uuid = UUID.fromString(testUUIDSet.instance(k).stringValue(0)); orgClassMap.put(uuid, testset.instance(k).classValue()); calcClassMap.put(uuid, classifier.classifyInstance(testset.instance(k))); } // Evaluate Evaluation testEval = new Evaluation(testset); testEval.evaluateModel(classifier, testset); // Chart data String testSeries = "Test Set (RMSE: " + String.format("%.2f", testEval.rootMeanSquaredError()) + ")"; series = new XYSeries(testSeries); Double[] yTestResidues = new Double[testUUIDSet.numInstances()]; Double[] orgTest = new Double[testUUIDSet.numInstances()]; calc = new Double[testUUIDSet.numInstances()]; for (int k = 0; k < testUUIDSet.numInstances(); k++) { UUID uuid = UUID.fromString(testUUIDSet.instance(k).stringValue(0)); orgTest[k] = orgClassMap.get(uuid); calc[k] = calcClassMap.get(uuid); if (calc[k] != null && orgTest[k] != null) { series.add(orgTest[k].doubleValue(), calc[k].doubleValue()); yTestResidues[k] = calc[k].doubleValue() - orgTest[k].doubleValue(); } else { ErrorLogger.getInstance().writeError("Can't find value for UUID: " + uuid.toString(), this.getActivityName()); throw new CDKTavernaException(this.getActivityName(), "Can't find value for UUID: " + uuid.toString()); } } orgValues.addAll(Arrays.asList(orgTest)); predictedValues.addAll(Arrays.asList(calc)); CollectionUtilities.sortTwoArrays(orgTest, yTestResidues); yResidueValues.add(yTestResidues); yResidueNames.add(testSeries); xyDataSet.addSeries(testSeries, series.toArray()); // Create summary sum += "\nTest Set:\n"; if (testEval.rootRelativeSquaredError() > 300) { testSkippedRMSE.add(j); } testRMSE.add(testEval.rootMeanSquaredError()); sum += testEval.toSummaryString(true); } // Produce cross validation data if (Boolean.parseBoolean(options[1])) { Evaluation cvEval = new Evaluation(trainset); if (testset != null) { Instances fullSet = tools.getFullSet(trainset, testset); cvEval.crossValidateModel(classifier, fullSet, 10, new Random(1)); } else 
{ cvEval.crossValidateModel(classifier, trainset, 10, new Random(1)); } sum += "\n10-fold cross-validation:\n"; if (cvEval.rootRelativeSquaredError() > 300) { cvSkippedRMSE.add(j); } cvRMSE.add(cvEval.rootMeanSquaredError()); sum += cvEval.toSummaryString(true); } // Create scatter plot String header = classifier.getClass().getSimpleName() + "\n Training set ratio: " + String.format("%.2f", trainingSetRatios.get(j)) + "%" + "\n Model name: " + modelFile.getName(); chartsObjects .add(chartTool.createScatterPlot(xyDataSet, header, "Original values", "Predicted values")); // Create residue plot chartsObjects.add(chartTool.createResiduePlot(yResidueValues, header, "Index", "(Predicted - Original)", yResidueNames)); // Create curve Double[] tmpOrg = new Double[orgValues.size()]; tmpOrg = orgValues.toArray(tmpOrg); Double[] tmpPred = new Double[predictedValues.size()]; tmpPred = predictedValues.toArray(tmpPred); CollectionUtilities.sortTwoArrays(tmpOrg, tmpPred); DefaultXYDataset dataSet = new DefaultXYDataset(); String orgName = "Original"; XYSeries orgSeries = new XYSeries(orgName); String predName = "Predicted"; XYSeries predSeries = new XYSeries(predName); for (int k = 0; k < tmpOrg.length; k++) { orgSeries.add((k + 1), tmpOrg[k]); predSeries.add((k + 1), tmpPred[k]); } dataSet.addSeries(orgName, orgSeries.toArray()); dataSet.addSeries(predName, predSeries.toArray()); chartsObjects.add(chartTool.createXYLineChart(header, "Index", "Value", dataSet, true, false)); // Add summary chartsObjects.add(sum); } // Create RMSE Plot DefaultCategoryDataset dataSet = new DefaultCategoryDataset(); double meanRMSE = 0; for (int i = 0; i < trainRMSE.size(); i++) { if (!trainSkippedRMSE.contains(i)) { dataSet.addValue(trainRMSE.get(i), "Training Set", "(" + String.format("%.2f", trainingSetRatios.get(i)) + "%/" + (i + 1) + ")"); ratioRMSESet[i].addValue(trainRMSE.get(i), "Training Set", "(" + String.format("%.2f", trainingSetRatios.get(i)) + "%/" + (i + 1) + "/" + fileIDX + ")"); } meanRMSE += trainRMSE.get(i); } trainMeanRMSE.add(meanRMSE / trainRMSE.size()); meanRMSE = 0; if (!testRMSE.isEmpty()) { for (int i = 0; i < testRMSE.size(); i++) { if (!testSkippedRMSE.contains(i)) { dataSet.addValue(testRMSE.get(i), "Test Set", "(" + String.format("%.2f", trainingSetRatios.get(i)) + "%/" + (i + 1) + ")"); ratioRMSESet[i].addValue(testRMSE.get(i), "Test Set", "(" + String.format("%.2f", trainingSetRatios.get(i)) + "%/" + (i + 1) + "/" + fileIDX + ")"); } meanRMSE += testRMSE.get(i); } testMeanRMSE.add(meanRMSE / testRMSE.size()); } meanRMSE = 0; if (!cvRMSE.isEmpty()) { for (int i = 0; i < cvRMSE.size(); i++) { if (!cvSkippedRMSE.contains(i)) { dataSet.addValue(cvRMSE.get(i), "10-fold Cross-validation", "(" + String.format("%.2f", trainingSetRatios.get(i)) + "%/" + (i + 1) + ")"); ratioRMSESet[i].addValue(cvRMSE.get(i), "10-fold Cross-validation", "(" + String.format("%.2f", trainingSetRatios.get(i)) + "%/" + (i + 1) + "/" + fileIDX + ")"); } meanRMSE += cvRMSE.get(i); } cvMeanRMSE.add(meanRMSE / cvRMSE.size()); } JFreeChart rmseChart = chartTool.createLineChart( "RMSE Plot\n Classifier:" + name + " " + tools.getOptionsFromFile(modelFile, name), "(Training set ratio/Set Index/File index)", "RMSE", dataSet, false, true); chartsObjects.add(rmseChart); rmseCharts.add(rmseChart); // Write PDF File file = FileNameGenerator.getNewFile(directory, ".pdf", "ScatterPlot"); chartTool.writeChartAsPDF(file, chartsObjects); resultFiles.add(file.getPath()); fileIDX++; } // Create set ratio RMSE plots for (int i = 0; i < 
ratioRMSESet.length; i++) { JFreeChart rmseChart = chartTool .createLineChart( "Set RMSE plot\n" + "(" + String.format("%.2f", trainingSetRatios.get(i)) + "%/" + (i + 1) + ")", "(Training set ratio/Index)", "RMSE", ratioRMSESet[i], false, true); rmseCharts.add(rmseChart); } // Create mean RMSE plot DefaultCategoryDataset dataSet = new DefaultCategoryDataset(); for (int i = 0; i < trainMeanRMSE.size(); i++) { dataSet.addValue(trainMeanRMSE.get(i), "Training Set", "" + (i + 1)); } for (int i = 0; i < testMeanRMSE.size(); i++) { dataSet.addValue(testMeanRMSE.get(i), "Test Set", "" + (i + 1)); } for (int i = 0; i < cvMeanRMSE.size(); i++) { dataSet.addValue(cvMeanRMSE.get(i), "10-fold Cross-validation", "" + (i + 1)); } JFreeChart rmseChart = chartTool.createLineChart("RMSE Mean Plot", "Dataset number", "Mean RMSE", dataSet); rmseCharts.add(rmseChart); File file = FileNameGenerator.getNewFile(directory, ".pdf", "RMSE-Sum"); chartTool.writeChartAsPDF(file, rmseCharts); resultFiles.add(file.getPath()); // Set output this.setOutputAsStringList(resultFiles, this.OUTPUT_PORTS[0]); }
From source file: com.bigpigs.fragments.SearchFragment.java
An Android click handler: the clear button removes drawn polylines and calls clear() on both the polyline list and the map before markers are redrawn, and the time-picker callback fills a HashMap of search parameters for an AsyncTask.
@Override
public void onClick(View v) {
    int id = v.getId();
    switch (id) {
    case R.id.bt_clear: {
        for (Polyline line : polylines) {
            line.remove();
        }
        polylines.clear();
        map.clear();
        Log.d("size", polylines.size() + "");
        reDrawMarker();
        bt_clear.setVisibility(View.GONE);
    }
    // no break: execution falls through into the current-location case
    case R.id.bt_currentLocation: {
        double lat = Double.parseDouble(
                getContext().getSharedPreferences("data", Context.MODE_PRIVATE).getString("lat", "0.0"));
        double lng = Double.parseDouble(
                getContext().getSharedPreferences("data", Context.MODE_PRIVATE).getString("lng", "0.0"));
        Log.d("latlng", lat + ":" + lng);
        currentLatLng = new LatLng(lat, lng);
        if (lat != 0.0 && lng != 0.0) {
            Utils.moveCamera(new LatLng(lat, lng), "You are here", 13, map);
        }
        break;
    }
    case R.id.tv_time: {
        mHour = 0;
        mMinute = 0;
        TimePickerDialog dialog = new TimePickerDialog(getContext(), new TimePickerDialog.OnTimeSetListener() {
            @Override
            public void onTimeSet(TimePicker view, int hourOfDay, int minute) {
                mHour = hourOfDay;
                mMinute = minute;
                sHour = mHour + "";
                sMinute = mMinute + "";
                if (mHour < 10 && mMinute < 10) {
                    tv_time.setText("0" + mHour + ":" + "0" + mMinute);
                }
                if (mHour < 10 && mMinute > 10) {
                    tv_time.setText("0" + mHour + ":" + "" + mMinute);
                }
                if (mHour > 10 && mMinute < 10) {
                    tv_time.setText("" + mHour + ":" + "0" + mMinute);
                }
                currentTime = tv_time.getText().toString();
                HashMap<String, String> map = new HashMap<String, String>();
                map.put("day", Calendar.getInstance().get(Calendar.YEAR) + "-"
                        + (Calendar.getInstance().get(Calendar.MONTH) + 1) + "-"
                        + Calendar.getInstance().get(Calendar.DAY_OF_MONTH));
                map.put("time_start", currentTime);
                map.put("textlocation", search_box.getSelectedItem().toString());
                new SearchSystemPitch(map).execute();
            }
        }, 7, 00, true);
        dialog.setTitle("Choose the start time you want");
        dialog.show();
        break;
    }
    // case R.id.bt_search: {
    //     map.clear();
    //     map.addMarker(new MarkerOptions().position(currentLatLng).title("You are here"));
    //     dateofweek = Calendar.getInstance().get(Calendar.DAY_OF_WEEK);
    //     HashMap<String, String> body = new HashMap<>();
    //     body.put("time_start", tv_time.getText().toString() + ":00");
    //     body.put("day", "2017-01-01");
    //     body.put("textlocation", search_box.getSelectedItem().toString());
    //     new SearchSystemPitch(body);
    //     break;
    // }
    }
}
From source file: org.alfresco.web.bean.wcm.CreateFormWizard.java
An Alfresco WCM wizard step that saves a rendering engine template; a single HashMap<QName, Serializable> of properties is reused for several aspects and the rendition-properties node, with props.clear() between each property set.
protected void saveRenderingEngineTemplate(final RenderingEngineTemplateData retd, final NodeRef formNodeRef) {
    if (LOGGER.isDebugEnabled())
        LOGGER.debug("adding rendering engine template " + retd + " to form " + this.getFormName());
    NodeRef renderingEngineTemplateNodeRef = this.getFileFolderService().searchSimple(formNodeRef,
            retd.getName());
    final HashMap<QName, Serializable> props = new HashMap<QName, Serializable>();
    if (renderingEngineTemplateNodeRef == null) {
        try {
            final FileInfo fileInfo = this.getFileFolderService().create(formNodeRef, retd.getName(),
                    ContentModel.TYPE_CONTENT);
            if (LOGGER.isDebugEnabled())
                LOGGER.debug("Created file node for file: " + retd.getName());
            renderingEngineTemplateNodeRef = fileInfo.getNodeRef();
        } catch (final FileExistsException fee) {
            LOGGER.error(fee.getName() + " already exists in " + fee.getParentNodeRef());
            throw fee;
        }
        // get a writer for the content and put the file
        final ContentWriter writer = this.getContentService().getWriter(renderingEngineTemplateNodeRef,
                ContentModel.PROP_CONTENT, true);
        // set the mimetype and encoding
        // XXXarielb mime type of template isn't known
        // writer.setMimetype("text/xml");
        writer.setEncoding("UTF-8");
        writer.putContent(retd.getFile());
        this.getNodeService().createAssociation(formNodeRef, renderingEngineTemplateNodeRef,
                WCMAppModel.ASSOC_RENDERING_ENGINE_TEMPLATES);
        props.clear();
        props.put(WCMAppModel.PROP_PARENT_RENDERING_ENGINE_NAME, retd.getRenderingEngine().getName());
        props.put(WCMAppModel.PROP_FORM_SOURCE, formNodeRef);
        this.getNodeService().addAspect(renderingEngineTemplateNodeRef,
                WCMAppModel.ASPECT_RENDERING_ENGINE_TEMPLATE, props);
        // apply the titled aspect - title and description
        props.clear();
        props.put(ContentModel.PROP_TITLE, retd.getTitle());
        props.put(ContentModel.PROP_DESCRIPTION, retd.getDescription());
        this.getNodeService().addAspect(renderingEngineTemplateNodeRef, ContentModel.ASPECT_TITLED, props);
    }
    if (LOGGER.isDebugEnabled())
        LOGGER.debug("adding rendition properties to " + renderingEngineTemplateNodeRef);
    props.clear();
    props.put(WCMAppModel.PROP_MIMETYPE_FOR_RENDITION, retd.getMimetypeForRendition());
    final NodeRef rpNodeRef = this.getNodeService()
            .createNode(renderingEngineTemplateNodeRef, WCMAppModel.ASSOC_RENDITION_PROPERTIES,
                    WCMAppModel.ASSOC_RENDITION_PROPERTIES, WCMAppModel.TYPE_RENDITION_PROPERTIES, props)
            .getChildRef();
    props.clear();
    props.put(WCMAppModel.PROP_OUTPUT_PATH_PATTERN, retd.getOutputPathPatternForRendition());
    this.getNodeService().addAspect(rpNodeRef, WCMAppModel.ASPECT_OUTPUT_PATH_PATTERN, props);
}
From source file: org.opendatakit.builder.CsvUtil.java
An OpenDataKit importer that reads a table's CSV file row by row; valueMap.clear() resets the per-row value map before each row's columns are parsed into it.
/** * Imports data from a csv file with elementKey headings. This csv file is * assumed to be under:/*from w ww.ja va 2 s. c om*/ * <ul> * <li>config/assets/csv/tableId.fileQualifier.csv</li> * </ul> * If the table does not exist, it attempts to create it using the schema and * metadata located here: * <ul> * <li>tables/tableId/definition.csv - data table definition</li> * <li>tables/tableId/properties.csv - key-value store</li> * </ul> * * @param importListener we tell this object our current status every 5 rows, and it updates * the user's progressdialog * @param tableId the id of the table to import * @param fileQualifier the optional prefix for the filename * @param createIfNotPresent whether we should try and create the table * @return whether we were successful * @throws ServicesAvailabilityException if the database is down */ public boolean importSeparable(ImportListener importListener, String tableId, String fileQualifier, boolean createIfNotPresent) throws ServicesAvailabilityException { DbHandle db = null; try { db = supervisor.getDatabase().openDatabase(appName); if (!supervisor.getDatabase().hasTableId(appName, db, tableId)) { if (createIfNotPresent) { updateTablePropertiesFromCsv(tableId); if (!supervisor.getDatabase().hasTableId(appName, db, tableId)) { return false; } } else { return false; } } OrderedColumns orderedDefns = supervisor.getDatabase().getUserDefinedColumns(appName, db, tableId); WebLogger.getLogger(appName).i(TAG, "importSeparable: tableId: " + tableId + " fileQualifier: " + (fileQualifier == null ? "<null>" : fileQualifier)); // reading data InputStreamReader input = null; try { File assetsCsvInstances = new File(ODKFileUtils.getAssetsCsvInstancesFolder(appName, tableId)); HashSet<File> instancesHavingData = new HashSet<>(); if (assetsCsvInstances.exists() && assetsCsvInstances.isDirectory()) { File[] subDirectories = assetsCsvInstances.listFiles(new FileFilter() { @Override public boolean accept(File pathname) { return pathname.isDirectory() && pathname.list().length != 0; } }); instancesHavingData.addAll(Arrays.asList(subDirectories)); } // both files are read from config/assets/csv directory... File assetsCsv = new File(ODKFileUtils.getAssetsCsvFolder(appName)); // read data table... File file = new File(assetsCsv, tableId + (fileQualifier != null && !fileQualifier.isEmpty() ? "." + fileQualifier : "") + ".csv"); FileInputStream in = new FileInputStream(file); input = new InputStreamReader(in, CharEncoding.UTF_8); RFC4180CsvReader cr = new RFC4180CsvReader(input); // don't have to worry about quotes in elementKeys... 
String[] columnsInFile = cr.readNext(); int columnsInFileLength = countUpToLastNonNullElement(columnsInFile); String v_id; String v_form_id; String v_locale; String v_savepoint_type; String v_savepoint_creator; String v_savepoint_timestamp; String v_row_etag; String v_default_access; String v_row_owner; String v_group_read_only; String v_group_modify; String v_group_privileged; HashMap<String, String> valueMap = new HashMap<>(); int rowCount = 0; String[] row; while (true) { row = cr.readNext(); rowCount++; if (rowCount % 5 == 0) { importListener.updateProgressDetail(rowCount); } if (row == null || countUpToLastNonNullElement(row) == 0) { break; } int rowLength = countUpToLastNonNullElement(row); // default values for metadata columns if not provided v_id = UUID.randomUUID().toString(); v_form_id = null; v_locale = CursorUtils.DEFAULT_LOCALE; v_savepoint_type = SavepointTypeManipulator.complete(); v_savepoint_creator = CursorUtils.DEFAULT_CREATOR; v_savepoint_timestamp = TableConstants.nanoSecondsFromMillis(System.currentTimeMillis()); v_row_etag = null; v_default_access = DataTableColumns.DEFAULT_DEFAULT_ACCESS; v_row_owner = DataTableColumns.DEFAULT_ROW_OWNER; v_group_read_only = DataTableColumns.DEFAULT_GROUP_READ_ONLY; v_group_modify = DataTableColumns.DEFAULT_GROUP_MODDIFY; v_group_privileged = DataTableColumns.DEFAULT_GROUP_PRIVILEGED; // clear value map valueMap.clear(); boolean foundId = false; for (int i = 0; i < columnsInFileLength; ++i) { if (i >= rowLength) break; String column = columnsInFile[i]; String tmp = row[i]; if (DataTableColumns.ID.equals(column)) { if (tmp != null && !tmp.isEmpty()) { foundId = true; v_id = tmp; } continue; } if (DataTableColumns.FORM_ID.equals(column)) { if (tmp != null && !tmp.isEmpty()) { v_form_id = tmp; } continue; } if (DataTableColumns.LOCALE.equals(column)) { if (tmp != null && !tmp.isEmpty()) { v_locale = tmp; } continue; } if (DataTableColumns.SAVEPOINT_TYPE.equals(column)) { if (tmp != null && !tmp.isEmpty()) { v_savepoint_type = tmp; } continue; } if (DataTableColumns.SAVEPOINT_CREATOR.equals(column)) { if (tmp != null && !tmp.isEmpty()) { v_savepoint_creator = tmp; } continue; } if (DataTableColumns.SAVEPOINT_TIMESTAMP.equals(column)) { if (tmp != null && !tmp.isEmpty()) { v_savepoint_timestamp = tmp; } continue; } if (DataTableColumns.ROW_ETAG.equals(column)) { if (tmp != null && !tmp.isEmpty()) { v_row_etag = tmp; } continue; } if (DataTableColumns.DEFAULT_ACCESS.equals(column)) { if (tmp != null && !tmp.isEmpty()) { v_default_access = tmp; } continue; } if (DataTableColumns.ROW_OWNER.equals(column)) { if (tmp != null && !tmp.isEmpty()) { v_row_owner = tmp; } continue; } if (DataTableColumns.GROUP_READ_ONLY.equals(column)) { if (tmp != null && !tmp.isEmpty()) { v_group_read_only = tmp; } continue; } if (DataTableColumns.GROUP_MODIFY.equals(column)) { if (tmp != null && !tmp.isEmpty()) { v_group_modify = tmp; } continue; } if (DataTableColumns.GROUP_PRIVILEGED.equals(column)) { if (tmp != null && !tmp.isEmpty()) { v_group_privileged = tmp; } continue; } try { orderedDefns.find(column); valueMap.put(column, tmp); } catch (IllegalArgumentException ignored) { // this is OK -- // the csv contains an extra column } } // if there are any conflicts or checkpoints on this row, we do not import // this row change. Instead, silently ignore them. 
UserTable table = supervisor.getDatabase().privilegedGetRowsWithId(appName, db, tableId, orderedDefns, v_id); if (table.getNumberOfRows() > 1) { WebLogger.getLogger(appName).w(TAG, "importSeparable: tableId: " + tableId + " rowId: " + v_id + " has checkpoints or conflicts -- IGNORED in .csv"); continue; } SyncState syncState = null; if (foundId && table.getNumberOfRows() == 1) { String syncStateStr = table.getRowAtIndex(0).getDataByKey(DataTableColumns.SYNC_STATE); if (syncStateStr == null) { throw new IllegalStateException("Unexpected null syncState value"); } syncState = SyncState.valueOf(syncStateStr); } /* * Insertion will set the SYNC_STATE to new_row. * * If the table is sync'd to the server, this will cause one sync * interaction with the server to confirm that the server also has * this record. * * If a record with this same rowId already exists, if it is in an * new_row sync state, we update it here. Otherwise, if there were any * local changes, we leave the row unchanged. */ if (syncState != null) { ContentValues cv = new ContentValues(); for (String column : valueMap.keySet()) { if (column != null) { cv.put(column, valueMap.get(column)); } } // The admin columns get added here cv.put(DataTableColumns.FORM_ID, v_form_id); cv.put(DataTableColumns.LOCALE, v_locale); cv.put(DataTableColumns.SAVEPOINT_TYPE, v_savepoint_type); cv.put(DataTableColumns.SAVEPOINT_TIMESTAMP, v_savepoint_timestamp); cv.put(DataTableColumns.SAVEPOINT_CREATOR, v_savepoint_creator); cv.put(DataTableColumns.ROW_ETAG, v_row_etag); cv.put(DataTableColumns.DEFAULT_ACCESS, v_default_access); cv.put(DataTableColumns.ROW_OWNER, v_row_owner); cv.put(DataTableColumns.GROUP_READ_ONLY, v_group_read_only); cv.put(DataTableColumns.GROUP_MODIFY, v_group_modify); cv.put(DataTableColumns.GROUP_PRIVILEGED, v_group_privileged); cv.put(DataTableColumns.SYNC_STATE, SyncState.new_row.name()); cv.putNull(DataTableColumns.CONFLICT_TYPE); if (v_id != null) { cv.put(DataTableColumns.ID, v_id); } if (syncState == SyncState.new_row) { // delete the existing row then insert the new values for it supervisor.getDatabase().privilegedDeleteRowWithId(appName, db, tableId, orderedDefns, v_id); supervisor.getDatabase().privilegedInsertRowWithId(appName, db, tableId, orderedDefns, cv, v_id, true); } // otherwise, do NOT update the row. // i.e., if the row has been sync'd with // the server, then we don't revise it. } else { ContentValues cv = new ContentValues(); for (String column : valueMap.keySet()) { if (column != null) { cv.put(column, valueMap.get(column)); } } // The admin columns get added here cv.put(DataTableColumns.FORM_ID, v_form_id); cv.put(DataTableColumns.LOCALE, v_locale); cv.put(DataTableColumns.SAVEPOINT_TYPE, v_savepoint_type); cv.put(DataTableColumns.SAVEPOINT_TIMESTAMP, v_savepoint_timestamp); cv.put(DataTableColumns.SAVEPOINT_CREATOR, v_savepoint_creator); cv.put(DataTableColumns.ROW_ETAG, v_row_etag); cv.put(DataTableColumns.DEFAULT_ACCESS, v_default_access); cv.put(DataTableColumns.ROW_OWNER, v_row_owner); cv.put(DataTableColumns.GROUP_READ_ONLY, v_group_read_only); cv.put(DataTableColumns.GROUP_MODIFY, v_group_modify); cv.put(DataTableColumns.GROUP_PRIVILEGED, v_group_privileged); cv.put(DataTableColumns.SYNC_STATE, SyncState.new_row.name()); cv.putNull(DataTableColumns.CONFLICT_TYPE); if (v_id == null) { v_id = LocalizationUtils.genUUID(); } cv.put(DataTableColumns.ID, v_id); // imports assume super-user level powers. Treat these as if they were // directed by the server during a sync. 
supervisor.getDatabase().privilegedInsertRowWithId(appName, db, tableId, orderedDefns, cv, v_id, true); } /* * Copy all attachment files into the destination row. * The attachments are in instance-id-labeled sub-directories. * Anything in the corresponding subdirectory should be * referenced by the valuesMap above. If it isn't, don't worry about * it. This is a simplification. */ File assetsInstanceFolder = new File( ODKFileUtils.getAssetsCsvInstanceFolder(appName, tableId, v_id)); if (instancesHavingData.contains(assetsInstanceFolder)) { File tableInstanceFolder = new File(ODKFileUtils.getInstanceFolder(appName, tableId, v_id)); tableInstanceFolder.mkdirs(); ODKFileUtils.copyDirectory(assetsInstanceFolder, tableInstanceFolder); instancesHavingData.remove(assetsInstanceFolder); } } cr.close(); return true; } catch (IOException ignored) { return false; } finally { try { input.close(); } catch (IOException ignored) { // we never even opened the file } } } catch (IOException ignored) { return false; } finally { if (db != null) { supervisor.getDatabase().closeDatabase(appName, db); } } }
From source file: com.krawler.spring.hrms.common.hrmsCommonDAOImpl.java
A DAO method that stores custom form configuration data; the same requestParams HashMap is reused for successive DAO calls inside the loop and cleared before each new set of filter or column parameters.
public int insertConfigData(HttpServletRequest request, String formtype, String referenceid, String companyid) {
    int successflag = 0;
    KwlReturnObject result;
    try {
        HashMap<String, Object> requestParams = new HashMap<String, Object>();
        requestParams.put("filter_names", Arrays.asList("formtype", "company.companyID"));
        requestParams.put("filter_values", Arrays.asList(formtype, companyid));
        result = customcolDAOObj.getConfigType(requestParams);
        List lst = result.getEntityList();
        Iterator ite = lst.iterator();
        while (ite.hasNext()) {
            ConfigType contyp = (ConfigType) ite.next();
            requestParams.clear();
            requestParams.put("filter_names", Arrays.asList("referenceid"));
            requestParams.put("filter_values", Arrays.asList(referenceid));
            result = customcolDAOObj.getConfigData(requestParams);
            List lst1 = result.getEntityList();
            Iterator ite1 = lst1.iterator();
            ConfigData condata = null;
            requestParams.clear();
            if (ite1.hasNext()) {
                condata = (ConfigData) ite1.next();
                requestParams.put("Id", condata.getId());
                requestParams.put("Col" + contyp.getColnum(), request.getParameter(contyp.getName()));
            } else {
                requestParams.put("Referenceid", referenceid);
                requestParams.put("Col" + contyp.getColnum(), request.getParameter(contyp.getName()));
            }
            result = customcolDAOObj.addConfigData(requestParams);
        }
    } catch (Exception e) {
        return 0;
    }
    return successflag;
}
From source file: com.krawler.spring.hrms.common.hrmsCommonDAOImpl.java
An overload of the method above that additionally attaches the stored ConfigData to a Useraccount; requestParams is reused and cleared in the same way.
@Override
public int insertConfigData(HttpServletRequest request, String formtype, String referenceid, String companyid,
        HashMap<String, Object> requestParams_extra) {
    int successflag = 0;
    KwlReturnObject result;
    try {
        HashMap<String, Object> requestParams = new HashMap<String, Object>();
        Useraccount Useraccountobj = (Useraccount) requestParams_extra.get("Useraccount");
        requestParams.put("filter_names", Arrays.asList("formtype", "company.companyID"));
        requestParams.put("filter_values", Arrays.asList(formtype, companyid));
        result = customcolDAOObj.getConfigType(requestParams);
        List lst = result.getEntityList();
        Iterator ite = lst.iterator();
        while (ite.hasNext()) {
            ConfigType contyp = (ConfigType) ite.next();
            requestParams.clear();
            requestParams.put("filter_names", Arrays.asList("referenceid"));
            requestParams.put("filter_values", Arrays.asList(referenceid));
            result = customcolDAOObj.getConfigData(requestParams);
            List lst1 = result.getEntityList();
            Iterator ite1 = lst1.iterator();
            ConfigData condata = null;
            requestParams.clear();
            if (ite1.hasNext()) {
                condata = (ConfigData) ite1.next();
                requestParams.put("Id", condata.getId());
                requestParams.put("Col" + contyp.getColnum(), request.getParameter(contyp.getName()));
            } else {
                requestParams.put("Referenceid", referenceid);
                requestParams.put("Col" + contyp.getColnum(), request.getParameter(contyp.getName()));
            }
            result = customcolDAOObj.addConfigData(requestParams);
            if (result.getRecordTotalCount() > 0)
                Useraccountobj.setConfigdata((ConfigData) result.getEntityList().get(0));
        }
    } catch (Exception e) {
        return 0;
    }
    return successflag;
}
From source file: ddf.catalog.pubsub.PredicateTest.java
A DDF pub-sub unit test for temporal predicates; properties.clear() resets the event-properties map between the passing and the failing test input.
@Test public void testTemporal() throws Exception { String methodName = "testTemporal"; LOGGER.debug("*************** START: " + methodName + " *****************"); MockQuery query = new MockQuery(); DatatypeFactory df = DatatypeFactory.newInstance(); XMLGregorianCalendar start = df.newXMLGregorianCalendarDate(2011, 10, 25, 0); XMLGregorianCalendar end = df.newXMLGregorianCalendarDate(2011, 10, 27, 0); query.addTemporalFilter(start, end, Metacard.EFFECTIVE); SubscriptionFilterVisitor visitor = new SubscriptionFilterVisitor(); Predicate pred = (Predicate) query.getFilter().accept(visitor, null); LOGGER.debug("resulting predicate: " + pred); Filter filter = query.getFilter(); FilterTransformer transform = new FilterTransformer(); transform.setIndentation(2);/*from www . j av a 2 s . c o m*/ String filterXml = transform.transform(filter); LOGGER.debug(filterXml); // input that passes temporal LOGGER.debug("\npass temporal.\n"); MetacardImpl metacard = new MetacardImpl(); metacard.setCreatedDate(new Date()); metacard.setExpirationDate(new Date()); metacard.setModifiedDate(new Date()); metacard.setMetadata(TestDataLibrary.getCatAndDogEntry()); XMLGregorianCalendar cal = df.newXMLGregorianCalendarDate(2011, 10, 26, 0); Date effectiveDate = cal.toGregorianCalendar().getTime(); metacard.setEffectiveDate(effectiveDate); HashMap<String, Object> properties = new HashMap<>(); properties.put(PubSubConstants.HEADER_OPERATION_KEY, PubSubConstants.CREATE); Map<String, Object> contextualMap = constructContextualMap(metacard); properties.put(PubSubConstants.HEADER_CONTEXTUAL_KEY, contextualMap); // Above Pulled from PubSubProviderImpl properties.put(PubSubConstants.HEADER_ENTRY_KEY, metacard); Event testEvent = new Event("topic", properties); boolean b = pred.matches(testEvent); assertTrue(b); // input that fails temporal LOGGER.debug("\nfail temporal. fail content type.\n"); XMLGregorianCalendar cal1 = df.newXMLGregorianCalendarDate(2012, 10, 30, 0); // time out of // range Date effectiveDate1 = cal1.toGregorianCalendar().getTime(); metacard.setEffectiveDate(effectiveDate1); LOGGER.debug("metacard date: " + metacard.getEffectiveDate()); properties.clear(); properties.put(PubSubConstants.HEADER_OPERATION_KEY, PubSubConstants.CREATE); properties.put(PubSubConstants.HEADER_CONTEXTUAL_KEY, contextualMap); properties.put(PubSubConstants.HEADER_ENTRY_KEY, metacard); testEvent = new Event("topic", properties); assertFalse(pred.matches(testEvent)); LOGGER.debug("*************** END: " + methodName + " *****************"); }
From source file: ddf.catalog.pubsub.PredicateTest.java
A DDF pub-sub unit test for content-type predicates; properties.clear() resets the event-properties map between the individual content-type scenarios.
@Test public void testContentTypeFilterTypeOnly() throws Exception { String methodName = "testContentTypeFilterTypeOnly"; LOGGER.debug("*************** START: " + methodName + " *****************"); MetacardImpl metacard = new MetacardImpl(); metacard.setMetadata(TestDataLibrary.getCatAndDogEntry()); String type1 = "type_1"; List<MockTypeVersionsExtension> extensions = new ArrayList<>(); MockTypeVersionsExtension ext1 = new MockTypeVersionsExtension(); ext1.setExtensionTypeName(type1);//from ww w . j av a2s . c om extensions.add(ext1); MockQuery query = new MockQuery(); query.addTypeFilter(extensions); SubscriptionFilterVisitor visitor = new SubscriptionFilterVisitor(); ContentTypePredicate pred = (ContentTypePredicate) query.getFilter().accept(visitor, null); assertEquals(type1, pred.getType()); assertNull(pred.getVersion()); HashMap<String, Object> properties = new HashMap<>(); properties.put(PubSubConstants.HEADER_OPERATION_KEY, PubSubConstants.CREATE); Map<String, Object> contextualMap = constructContextualMap(metacard); properties.put(PubSubConstants.HEADER_CONTEXTUAL_KEY, contextualMap); // Above Pulled from PubSubProviderImpl // handle null case properties.put(PubSubConstants.HEADER_CONTENT_TYPE_KEY, null); Event testEvent = new Event("topic", properties); assertFalse(pred.matches(testEvent)); // handle content type properties.put(PubSubConstants.HEADER_CONTENT_TYPE_KEY, type1 + ","); testEvent = new Event("topic", properties); assertTrue(pred.matches(testEvent)); // handle content version that matches content type properties.put(PubSubConstants.HEADER_CONTENT_TYPE_KEY, "," + type1); testEvent = new Event("topic", properties); assertFalse(pred.matches(testEvent)); properties.clear(); properties.put(PubSubConstants.HEADER_OPERATION_KEY, PubSubConstants.CREATE); properties.put(PubSubConstants.HEADER_CONTEXTUAL_KEY, contextualMap); properties.put(PubSubConstants.HEADER_CONTENT_TYPE_KEY, type1 + "," + "random_version"); testEvent = new Event("topic", properties); assertTrue(pred.matches(testEvent)); properties.clear(); properties.put(PubSubConstants.HEADER_OPERATION_KEY, PubSubConstants.CREATE); properties.put(PubSubConstants.HEADER_CONTEXTUAL_KEY, contextualMap); properties.put(PubSubConstants.HEADER_CONTENT_TYPE_KEY, "unmatchingtype" + "," + "random_version"); testEvent = new Event("topic", properties); assertFalse(pred.matches(testEvent)); properties.clear(); properties.put(PubSubConstants.HEADER_OPERATION_KEY, PubSubConstants.CREATE); properties.put(PubSubConstants.HEADER_CONTEXTUAL_KEY, contextualMap); properties.put(PubSubConstants.HEADER_CONTENT_TYPE_KEY, "," + "unmatchingversion"); // Invalid // input testEvent = new Event("topic", properties); assertFalse(pred.matches(testEvent)); LOGGER.debug("*************** END: " + methodName + " *****************"); }
From source file: carskit.alg.cars.transformation.hybridfiltering.DCW.java
A CARSKit context-aware recommender; nns.clear() empties the candidate-neighbour map so it can be refilled with only the top-k most similar users before the prediction is computed.
protected double predict(int a, int t, int c, DenseVector position) throws Exception { double pred = 0; double[] pos = position.getData(); double[] pos_1 = new double[num_dim]; double[] pos_2 = new double[num_dim]; double[] pos_3 = new double[num_dim]; for (int i = 0; i < pos.length; ++i) { if (i < num_dim) pos_1[i] = pos[i];/*from ww w .jav a2s . c o m*/ else if (i < 2 * num_dim) pos_2[i - num_dim] = pos[i]; else pos_3[i - 2 * num_dim] = pos[i]; } double part3 = 0, part3_count = 0; HashMap<Integer, Double> part22 = new HashMap<Integer, Double>(); HashMap<Integer, Double> part22_count = new HashMap<Integer, Double>(); HashMap<Integer, Double> part21 = new HashMap<>(); HashMap<Integer, Double> nns = new HashMap<>(); // key = ngbr id, value = sim for (MatrixEntry me : trainMatrix) { int ui = me.row(); // user-item int u = rateDao.getUserIdFromUI(ui); int ctx = me.column(); // context double rujc = me.get(); // real rating if (u == a) { double sim = ContextSimilarity(c, ctx, pos_3); if (sim >= th) { part3 += sim * rujc; part3_count += sim; } } else { double simu = userCorrs.get(a, u); if (simu > 0) { // a potential neighbor int j = rateDao.getItemIdFromUI(ui); // double check whether this user has rated item t in c1 int newui = rateDao.getUserItemId(u + "," + t); if (newui != -1) { SparseVector sv = trainMatrix.row(newui); if (sv != null) { // user has rated item t in some contexts int[] cs = sv.getIndex(); if (ContextMatch(c, cs, pos_1)) { // this user is a successful neighbor nns.put(u, simu); // get value for part21 double rate = ContextWeight(c, cs, pos_2, sv); if (rate == -1) { // if not rating profiles with a simlarity larger than threshold rate = train.get(u, t); } part21.put(u, rate); } } } } } } // get user average if (part3_count == 0) part3 = userMeans.get(a); else part3 /= part3_count; pred += part3; // start calculations // top-N neighbors List<Map.Entry<Integer, Double>> sorted = Lists.sortMap(nns, true); int k = nns.size(); if (k != 0) { k = (k > knn) ? knn : k; List<Map.Entry<Integer, Double>> subset = sorted.subList(0, k); nns.clear(); for (Map.Entry<Integer, Double> kv : subset) nns.put(kv.getKey(), kv.getValue()); // calculate part22 first List<Integer> uiids = trainMatrix.rows(); for (int uiid : uiids) { int u = rateDao.getUserIdFromUI(uiid); if (nns.containsKey(u)) { SparseVector sv = trainMatrix.row(uiid); int[] cs = sv.getIndex(); for (int ctx : cs) { double sim = ContextSimilarity(c, ctx, pos_2); double r = sv.get(ctx); if (part22.containsKey(u)) { part22.put(u, part22.get(u) + sim * r); part22_count.put(u, part22_count.get(u) + sim); } else { part22.put(u, sim * r); part22_count.put(u, sim); } } } } double sum1 = 0; double sum2 = 0; for (Map.Entry<Integer, Double> en : nns.entrySet()) { int ngbr = en.getKey(); sum2 += en.getValue(); double tmp = 0; if (part22.containsKey(ngbr)) tmp = part22.get(ngbr) / part22_count.get(ngbr); else tmp = userMeans.get(ngbr); sum1 += en.getValue() * (part21.get(ngbr) - tmp); } pred += sum1 / sum2; } return pred; }
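A closing note on the pattern shared by most of these examples: clear() removes every entry but keeps the same map instance (and, in the OpenJDK implementation, its already-grown bucket array), so other code holding a reference to that map sees it become empty, whereas assigning a new HashMap leaves old references untouched. The sketch below is a general illustration with made-up names, not code from any of the projects above.

import java.util.HashMap;
import java.util.Map;

public class ClearVersusNew {
    public static void main(String[] args) {
        Map<String, String> shared = new HashMap<>();
        Map<String, String> view = shared;          // second reference to the same map

        shared.put("k", "v");
        shared.clear();                             // mutates the one shared instance
        System.out.println(view.isEmpty());         // true: both references see the cleared map

        shared = new HashMap<>();                   // reassignment creates a fresh map
        shared.put("k2", "v2");
        System.out.println(view.isEmpty());         // still true: 'view' still points at the old map
        System.out.println(shared.size());          // 1
    }
}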