List of usage examples for java.util HashMap keySet
public Set<K> keySet()
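Before the project examples, a minimal self-contained sketch (map contents invented for illustration) of what keySet() returns: a Set view backed by the map, so removals through the view remove entries from the map, while additions through the view are unsupported.

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

public class KeySetDemo {
    public static void main(String[] args) {
        Map<String, Integer> ages = new HashMap<>();
        ages.put("alice", 30);
        ages.put("bob", 25);

        // keySet() returns a live view, not a copy
        Set<String> names = ages.keySet();
        System.out.println(names); // e.g. [bob, alice] (order not guaranteed)

        // removing from the view removes the mapping from the map
        names.remove("bob");
        System.out.println(ages.containsKey("bob")); // false

        // adding to the view is not supported
        // names.add("carol"); // would throw UnsupportedOperationException
    }
}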
From source file:hyperheuristics.main.comparisons.ComputeIndicators.java
public static void main(String[] args) throws IOException, InterruptedException {
    int[] numberOfObjectivesArray = new int[] { 2, 4 };

    String[] problems = new String[] { "OO_MyBatis", "OA_AJHsqldb", "OA_AJHotDraw", "OO_BCEL",
            "OO_JHotDraw", "OA_HealthWatcher",
            // "OA_TollSystems",
            "OO_JBoss" };

    String[] heuristicFunctions = new String[] { LowLevelHeuristic.CHOICE_FUNCTION,
            LowLevelHeuristic.MULTI_ARMED_BANDIT, LowLevelHeuristic.RANDOM };

    String[] algorithms = new String[] { "NSGA-II",
            // "SPEA2"
    };

    MetricsUtil metricsUtil = new MetricsUtil();
    DecimalFormat decimalFormatter = new DecimalFormat("0.00E0");
    Mean mean = new Mean();
    StandardDeviation standardDeviation = new StandardDeviation();

    InvertedGenerationalDistance igd = new InvertedGenerationalDistance();
    GenerationalDistance gd = new GenerationalDistance();
    Spread spread = new Spread();
    Coverage coverage = new Coverage();

    for (int objectives : numberOfObjectivesArray) {
        try (FileWriter IGDWriter = new FileWriter("experiment/IGD_" + objectives + ".tex");
                FileWriter spreadWriter = new FileWriter("experiment/SPREAD_" + objectives + ".tex");
                FileWriter GDWriter = new FileWriter("experiment/GD_" + objectives + ".tex");
                FileWriter coverageWriter = new FileWriter("experiment/COVERAGE_" + objectives + ".tex")) {
            StringBuilder latexTableBuilder = new StringBuilder();

            latexTableBuilder.append("\\documentclass{paper}\n").append("\n")
                    .append("\\usepackage[T1]{fontenc}\n").append("\\usepackage[latin1]{inputenc}\n")
                    .append("\\usepackage[hidelinks]{hyperref}\n").append("\\usepackage{tabulary}\n")
                    .append("\\usepackage{booktabs}\n").append("\\usepackage{multirow}\n")
                    .append("\\usepackage{amsmath}\n").append("\\usepackage{mathtools}\n")
                    .append("\\usepackage{graphicx}\n").append("\\usepackage{array}\n")
                    .append("\\usepackage[linesnumbered,ruled,inoutnumbered]{algorithm2e}\n")
                    .append("\\usepackage{subfigure}\n").append("\\usepackage[hypcap]{caption}\n")
                    .append("\\usepackage{pdflscape}\n").append("\n").append("\\begin{document}\n")
                    .append("\n").append("\\begin{landscape}\n").append("\n");

            pfKnown: {
                latexTableBuilder.append("\\begin{table}[!htb]\n").append("\t\\centering\n")
                        .append("\t\\def\\arraystretch{1.5}\n")
                        // .append("\t\\setlength{\\tabcolsep}{10pt}\n")
                        // .append("\t\\fontsize{8pt}{10pt}\\selectfont\n")
                        .append("\t\\caption{INDICATOR found for $PF_{known}$ for ").append(objectives)
                        .append(" objectives}\n").append("\t\\label{tab:INDICATOR ").append(objectives)
                        .append(" objectives}\n").append("\t\\begin{tabulary}{\\linewidth}{c");

                for (String algorithm : algorithms) {
                    latexTableBuilder.append("c");
                    for (String heuristicFunction : heuristicFunctions) {
                        latexTableBuilder.append("c");
                    }
                }

                latexTableBuilder.append("}\n").append("\t\t\\toprule\n").append("\t\t\\textbf{System}");

                for (String algorithm : algorithms) {
                    latexTableBuilder.append(" & \\textbf{").append(algorithm).append("}");
                    for (String heuristicFunction : heuristicFunctions) {
                        latexTableBuilder.append(" & \\textbf{").append(algorithm).append("-")
                                .append(heuristicFunction).append("}");
                    }
                }

                latexTableBuilder.append("\\\\\n").append("\t\t\\midrule\n");

                for (String problem : problems) {
                    NonDominatedSolutionList trueFront = new NonDominatedSolutionList();

                    pfTrueComposing: {
                        for (String algorithm : algorithms) {
                            SolutionSet mecbaFront = metricsUtil.readNonDominatedSolutionSet(
                                    "resultado/" + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem
                                            + "_Comb_" + objectives + "obj/All_FUN_"
                                            + algorithm.toLowerCase().replaceAll("-", "") + "-" + problem);
                            trueFront.addAll(mecbaFront);

                            for (String hyperHeuristic : heuristicFunctions) {
                                SolutionSet front = metricsUtil.readNonDominatedSolutionSet(
                                        "experiment/" + algorithm + "/" + objectives + "objectives/"
                                                + hyperHeuristic + "/" + problem + "/FUN.txt");
                                trueFront.addAll(front);
                            }
                        }
                    }

                    double[][] trueFrontMatrix = trueFront.writeObjectivesToMatrix();

                    HashMap<String, Double> igdMap = new HashMap<>();
                    HashMap<String, Double> gdMap = new HashMap<>();
                    HashMap<String, Double> spreadMap = new HashMap<>();
                    HashMap<String, Double> coverageMap = new HashMap<>();

                    for (String algorithm : algorithms) {
                        double[][] mecbaFront = metricsUtil
                                .readFront("resultado/" + algorithm.toLowerCase().replaceAll("-", "") + "/"
                                        + problem + "_Comb_" + objectives + "obj/All_FUN_"
                                        + algorithm.toLowerCase().replaceAll("-", "") + "-" + problem);

                        igdMap.put(algorithm,
                                igd.invertedGenerationalDistance(mecbaFront, trueFrontMatrix, objectives));
                        gdMap.put(algorithm, gd.generationalDistance(mecbaFront, trueFrontMatrix, objectives));
                        spreadMap.put(algorithm, spread.spread(mecbaFront, trueFrontMatrix, objectives));
                        coverageMap.put(algorithm, coverage.coverage(mecbaFront, trueFrontMatrix));

                        for (String heuristic : heuristicFunctions) {
                            double[][] heuristicFront = metricsUtil.readFront("experiment/" + algorithm + "/"
                                    + objectives + "objectives/" + heuristic + "/" + problem + "/FUN.txt");

                            igdMap.put(algorithm + "-" + heuristic,
                                    igd.invertedGenerationalDistance(heuristicFront, trueFrontMatrix, objectives));
                            gdMap.put(algorithm + "-" + heuristic,
                                    gd.generationalDistance(heuristicFront, trueFrontMatrix, objectives));
                            spreadMap.put(algorithm + "-" + heuristic,
                                    spread.spread(heuristicFront, trueFrontMatrix, objectives));
                            coverageMap.put(algorithm + "-" + heuristic,
                                    coverage.coverage(heuristicFront, trueFrontMatrix));
                        }
                    }

                    latexTableBuilder.append("\t\t").append(problem);

                    String latexTable = latexTableBuilder.toString();
                    latexTableBuilder = new StringBuilder();

                    latexTable = latexTable.replaceAll("O[OA]\\_", "").replaceAll("ChoiceFunction", "CF")
                            .replaceAll("MultiArmedBandit", "MAB");

                    IGDWriter.write(latexTable.replaceAll("INDICATOR", "IGD"));
                    spreadWriter.write(latexTable.replaceAll("INDICATOR", "Spread"));
                    GDWriter.write(latexTable.replaceAll("INDICATOR", "GD"));
                    coverageWriter.write(latexTable.replaceAll("INDICATOR", "Coverage"));

                    String bestHeuristicIGD = "NULL";
                    String bestHeuristicGD = "NULL";
                    String bestHeuristicSpread = "NULL";
                    String bestHeuristicCoverage = "NULL";

                    getBest: {
                        double bestMeanIGD = Double.POSITIVE_INFINITY;
                        double bestMeanGD = Double.POSITIVE_INFINITY;
                        double bestMeanSpread = Double.NEGATIVE_INFINITY;
                        double bestMeanCoverage = Double.NEGATIVE_INFINITY;

                        for (String heuristic : igdMap.keySet()) {
                            double heuristicIGD = igdMap.get(heuristic);
                            double heuristicGD = gdMap.get(heuristic);
                            double heuristicSpread = spreadMap.get(heuristic);
                            double heuristicCoverage = coverageMap.get(heuristic);

                            if (heuristicIGD < bestMeanIGD) {
                                bestMeanIGD = heuristicIGD;
                                bestHeuristicIGD = heuristic;
                            }
                            if (heuristicGD < bestMeanGD) {
                                bestMeanGD = heuristicGD;
                                bestHeuristicGD = heuristic;
                            }
                            if (heuristicSpread > bestMeanSpread) {
                                bestMeanSpread = heuristicSpread;
                                bestHeuristicSpread = heuristic;
                            }
                            if (heuristicCoverage > bestMeanCoverage) {
                                bestMeanCoverage = heuristicCoverage;
                                bestHeuristicCoverage = heuristic;
                            }
                        }
                    }

                    StringBuilder igdBuilder = new StringBuilder();
                    StringBuilder gdBuilder = new StringBuilder();
                    StringBuilder spreadBuilder = new StringBuilder();
                    StringBuilder coverageBuilder = new StringBuilder();

                    String[] newHeuristicFunctions = new String[heuristicFunctions.length * algorithms.length
                            + algorithms.length];
                    fulfillNewHeuristics: {
                        int i = 0;
                        for (String algorithm : algorithms) {
                            newHeuristicFunctions[i++] = algorithm;
                            for (String heuristicFunction : heuristicFunctions) {
                                newHeuristicFunctions[i++] = algorithm + "-" + heuristicFunction;
                            }
                        }
                    }

                    for (String heuristic : newHeuristicFunctions) {
                        igdBuilder.append(" & ");
                        boolean bold = heuristic.equals(bestHeuristicIGD)
                                || igdMap.get(heuristic).equals(igdMap.get(bestHeuristicIGD));
                        if (bold) { igdBuilder.append("\\textbf{"); }
                        igdBuilder.append(decimalFormatter.format(igdMap.get(heuristic)));
                        if (bold) { igdBuilder.append("}"); }

                        gdBuilder.append(" & ");
                        bold = heuristic.equals(bestHeuristicGD)
                                || gdMap.get(heuristic).equals(gdMap.get(bestHeuristicGD));
                        if (bold) { gdBuilder.append("\\textbf{"); }
                        gdBuilder.append(decimalFormatter.format(gdMap.get(heuristic)));
                        if (bold) { gdBuilder.append("}"); }

                        spreadBuilder.append(" & ");
                        bold = heuristic.equals(bestHeuristicSpread)
                                || spreadMap.get(heuristic).equals(spreadMap.get(bestHeuristicSpread));
                        if (bold) { spreadBuilder.append("\\textbf{"); }
                        spreadBuilder.append(decimalFormatter.format(spreadMap.get(heuristic)));
                        if (bold) { spreadBuilder.append("}"); }

                        coverageBuilder.append(" & ");
                        bold = heuristic.equals(bestHeuristicCoverage)
                                || coverageMap.get(heuristic).equals(coverageMap.get(bestHeuristicCoverage));
                        if (bold) { coverageBuilder.append("\\textbf{"); }
                        coverageBuilder.append(decimalFormatter.format(coverageMap.get(heuristic)));
                        if (bold) { coverageBuilder.append("}"); }
                    }

                    IGDWriter.write(igdBuilder + "\\\\\n");
                    spreadWriter.write(spreadBuilder + "\\\\\n");
                    GDWriter.write(gdBuilder + "\\\\\n");
                    coverageWriter.write(coverageBuilder + "\\\\\n");
                }
                latexTableBuilder = new StringBuilder();
                latexTableBuilder.append("\t\t\\bottomrule\n").append("\t\\end{tabulary}\n")
                        .append("\\end{table}\n\n");
            }

            averages: {
                latexTableBuilder.append("\\begin{table}[!htb]\n").append("\t\\centering\n")
                        .append("\t\\def\\arraystretch{1.5}\n")
                        // .append("\t\\setlength{\\tabcolsep}{10pt}\n")
                        // .append("\t\\fontsize{8pt}{10pt}\\selectfont\n")
                        .append("\t\\caption{INDICATOR averages found for ").append(objectives)
                        .append(" objectives}\n").append("\t\\label{tab:INDICATOR ").append(objectives)
                        .append(" objectives}\n").append("\t\\begin{tabulary}{\\linewidth}{c");

                for (String algorithm : algorithms) {
                    latexTableBuilder.append("c");
                    for (String heuristicFunction : heuristicFunctions) {
                        latexTableBuilder.append("c");
                    }
                }

                latexTableBuilder.append("}\n").append("\t\t\\toprule\n").append("\t\t\\textbf{System}");

                for (String algorithm : algorithms) {
                    latexTableBuilder.append(" & \\textbf{").append(algorithm).append("}");
                    for (String heuristicFunction : heuristicFunctions) {
                        latexTableBuilder.append(" & \\textbf{").append(algorithm).append("-")
                                .append(heuristicFunction).append("}");
                    }
                }

                latexTableBuilder.append("\\\\\n").append("\t\t\\midrule\n");

                for (String problem : problems) {
                    NonDominatedSolutionList trueFront = new NonDominatedSolutionList();

                    pfTrueComposing: {
                        for (String algorithm : algorithms) {
                            SolutionSet mecbaFront = metricsUtil.readNonDominatedSolutionSet(
                                    "resultado/" + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem
                                            + "_Comb_" + objectives + "obj/All_FUN_"
                                            + algorithm.toLowerCase().replaceAll("-", "") + "-" + problem);
                            trueFront.addAll(mecbaFront);

                            for (String hyperHeuristic : heuristicFunctions) {
                                SolutionSet front = metricsUtil.readNonDominatedSolutionSet(
                                        "experiment/" + algorithm + "/" + objectives + "objectives/"
                                                + hyperHeuristic + "/" + problem + "/FUN.txt");
                                trueFront.addAll(front);
                            }
                        }
                    }

                    double[][] trueFrontMatrix = trueFront.writeObjectivesToMatrix();

                    HashMap<String, double[]> igdMap = new HashMap<>();
                    HashMap<String, double[]> gdMap = new HashMap<>();
                    HashMap<String, double[]> spreadMap = new HashMap<>();
                    HashMap<String, double[]> coverageMap = new HashMap<>();

                    mocaito: {
                        for (String algorithm : algorithms) {
                            double[] mecbaIGDs = new double[EXECUTIONS];
                            double[] mecbaGDs = new double[EXECUTIONS];
                            double[] mecbaSpreads = new double[EXECUTIONS];
                            double[] mecbaCoverages = new double[EXECUTIONS];
                            for (int i = 0; i < EXECUTIONS; i++) {
                                double[][] mecbaFront = metricsUtil.readFront("resultado/"
                                        + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem
                                        + "_Comb_" + objectives + "obj/FUN_"
                                        + algorithm.toLowerCase().replaceAll("-", "") + "-" + problem + "-" + i
                                        + ".NaoDominadas");
                                mecbaIGDs[i] = igd.invertedGenerationalDistance(mecbaFront, trueFrontMatrix,
                                        objectives);
                                mecbaGDs[i] = gd.generationalDistance(mecbaFront, trueFrontMatrix, objectives);
                                mecbaSpreads[i] = spread.spread(mecbaFront, trueFrontMatrix, objectives);
                                mecbaCoverages[i] = coverage.coverage(mecbaFront, trueFrontMatrix);
                            }
                            igdMap.put(algorithm, mecbaIGDs);
                            gdMap.put(algorithm, mecbaGDs);
                            spreadMap.put(algorithm, mecbaSpreads);
                            coverageMap.put(algorithm, mecbaCoverages);
                        }
                    }

                    for (String algorithm : algorithms) {
                        for (String heuristic : heuristicFunctions) {
                            double[] hhIGDs = new double[EXECUTIONS];
                            double[] hhGDs = new double[EXECUTIONS];
                            double[] hhSpreads = new double[EXECUTIONS];
                            double[] hhCoverages = new double[EXECUTIONS];
                            for (int i = 0; i < EXECUTIONS; i++) {
                                double[][] hhFront = metricsUtil.readFront("experiment/" + algorithm + "/"
                                        + objectives + "objectives/" + heuristic + "/" + problem
                                        + "/EXECUTION_" + i + "/FUN.txt");
                                hhIGDs[i] = igd.invertedGenerationalDistance(hhFront, trueFrontMatrix,
                                        objectives);
                                hhGDs[i] = gd.generationalDistance(hhFront, trueFrontMatrix, objectives);
                                hhSpreads[i] = spread.spread(hhFront, trueFrontMatrix, objectives);
                                hhCoverages[i] = coverage.coverage(hhFront, trueFrontMatrix);
                            }
                            igdMap.put(algorithm + "-" + heuristic, hhIGDs);
                            gdMap.put(algorithm + "-" + heuristic, hhGDs);
                            spreadMap.put(algorithm + "-" + heuristic, hhSpreads);
                            coverageMap.put(algorithm + "-" + heuristic, hhCoverages);
                        }
                    }

                    HashMap<String, HashMap<String, Boolean>> igdResult = KruskalWallisTest.test(igdMap);
                    HashMap<String, HashMap<String, Boolean>> gdResult = KruskalWallisTest.test(gdMap);
                    HashMap<String, HashMap<String, Boolean>> spreadResult = KruskalWallisTest.test(spreadMap);
                    HashMap<String, HashMap<String, Boolean>> coverageResult = KruskalWallisTest.test(coverageMap);

                    latexTableBuilder.append("\t\t").append(problem);

                    String latexTable = latexTableBuilder.toString();
                    latexTable = latexTable.replaceAll("O[OA]\\_", "").replaceAll("ChoiceFunction", "CF")
                            .replaceAll("MultiArmedBandit", "MAB");

                    IGDWriter.write(latexTable.replaceAll("INDICATOR", "IGD"));
                    spreadWriter.write(latexTable.replaceAll("INDICATOR", "Spread"));
                    GDWriter.write(latexTable.replaceAll("INDICATOR", "GD"));
                    coverageWriter.write(latexTable.replaceAll("INDICATOR", "Coverage"));

                    latexTableBuilder = new StringBuilder();

                    String bestHeuristicIGD = "NULL";
                    String bestHeuristicGD = "NULL";
                    String bestHeuristicSpread = "NULL";
                    String bestHeuristicCoverage = "NULL";

                    getBest: {
                        double bestMeanIGD = Double.POSITIVE_INFINITY;
                        double bestMeanGD = Double.POSITIVE_INFINITY;
                        double bestMeanSpread = Double.NEGATIVE_INFINITY;
                        double bestMeanCoverage = Double.NEGATIVE_INFINITY;

                        for (String heuristic : igdMap.keySet()) {
                            double heuristicMeanIGD = mean.evaluate(igdMap.get(heuristic));
                            double heuristicMeanGD = mean.evaluate(gdMap.get(heuristic));
                            double heuristicMeanSpread = mean.evaluate(spreadMap.get(heuristic));
                            double heuristicMeanCoverage = mean.evaluate(coverageMap.get(heuristic));

                            if (heuristicMeanIGD < bestMeanIGD) {
                                bestMeanIGD = heuristicMeanIGD;
                                bestHeuristicIGD = heuristic;
                            }
                            if (heuristicMeanGD < bestMeanGD) {
                                bestMeanGD = heuristicMeanGD;
                                bestHeuristicGD = heuristic;
                            }
                            if (heuristicMeanSpread > bestMeanSpread) {
                                bestMeanSpread = heuristicMeanSpread;
                                bestHeuristicSpread = heuristic;
                            }
                            if (heuristicMeanCoverage > bestMeanCoverage) {
                                bestMeanCoverage = heuristicMeanCoverage;
                                bestHeuristicCoverage = heuristic;
                            }
                        }
                    }

                    StringBuilder igdBuilder = new StringBuilder();
                    StringBuilder gdBuilder = new StringBuilder();
                    StringBuilder spreadBuilder = new StringBuilder();
                    StringBuilder coverageBuilder = new StringBuilder();

                    String[] newHeuristicFunctions = new String[heuristicFunctions.length * algorithms.length
                            + algorithms.length];
                    fulfillNewHeuristics: {
                        int i = 0;
                        for (String algorithm : algorithms) {
                            newHeuristicFunctions[i++] = algorithm;
                            for (String heuristicFunction : heuristicFunctions) {
                                newHeuristicFunctions[i++] = algorithm + "-" + heuristicFunction;
                            }
                        }
                    }

                    for (String heuristic : newHeuristicFunctions) {
                        igdBuilder.append(" & ");
                        boolean bold = heuristic.equals(bestHeuristicIGD)
                                || !igdResult.get(heuristic).get(bestHeuristicIGD);
                        if (bold) { igdBuilder.append("\\textbf{"); }
                        igdBuilder.append(decimalFormatter.format(mean.evaluate(igdMap.get(heuristic))) + " ("
                                + decimalFormatter.format(standardDeviation.evaluate(igdMap.get(heuristic)))
                                + ")");
                        if (bold) { igdBuilder.append("}"); }

                        gdBuilder.append(" & ");
                        bold = heuristic.equals(bestHeuristicGD)
                                || !gdResult.get(heuristic).get(bestHeuristicGD);
                        if (bold) { gdBuilder.append("\\textbf{"); }
                        gdBuilder.append(decimalFormatter.format(mean.evaluate(gdMap.get(heuristic))) + " ("
                                + decimalFormatter.format(standardDeviation.evaluate(gdMap.get(heuristic)))
                                + ")");
                        if (bold) { gdBuilder.append("}"); }

                        spreadBuilder.append(" & ");
                        bold = heuristic.equals(bestHeuristicSpread)
                                || !spreadResult.get(heuristic).get(bestHeuristicSpread);
                        if (bold) { spreadBuilder.append("\\textbf{"); }
                        spreadBuilder.append(decimalFormatter.format(mean.evaluate(spreadMap.get(heuristic)))
                                + " ("
                                + decimalFormatter.format(standardDeviation.evaluate(spreadMap.get(heuristic)))
                                + ")");
                        if (bold) { spreadBuilder.append("}"); }

                        coverageBuilder.append(" & ");
                        bold = heuristic.equals(bestHeuristicCoverage)
                                || !coverageResult.get(heuristic).get(bestHeuristicCoverage);
                        if (bold) { coverageBuilder.append("\\textbf{"); }
                        coverageBuilder.append(decimalFormatter.format(mean.evaluate(coverageMap.get(heuristic))))
                                .append(" (")
                                .append(decimalFormatter
                                        .format(standardDeviation.evaluate(coverageMap.get(heuristic))))
                                .append(")");
                        if (bold) { coverageBuilder.append("}"); }
                    }

                    IGDWriter.write(igdBuilder + "\\\\\n");
                    spreadWriter.write(spreadBuilder + "\\\\\n");
                    GDWriter.write(gdBuilder + "\\\\\n");
                    coverageWriter.write(coverageBuilder + "\\\\\n");
                }
                latexTableBuilder.append("\t\t\\bottomrule\n").append("\t\\end{tabulary}\n")
                        .append("\\end{table}\n\n");
            }

            latexTableBuilder.append("\\end{landscape}\n\n").append("\\end{document}");

            String latexTable = latexTableBuilder.toString();
            IGDWriter.write(latexTable);
            spreadWriter.write(latexTable);
            GDWriter.write(latexTable);
            coverageWriter.write(latexTable);
        }
    }
}
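A side note on the getBest loops above: they iterate igdMap.keySet() and then call get(...) on each map once per key. When both key and value are needed, iterating entrySet() avoids the repeated lookups. A sketch of the pattern, reduced to a single map and reusing the example's names (the helper method itself is invented for illustration):

// sketch: finding the entry with the lowest value via entrySet(), one lookup per entry
static String bestByLowestValue(HashMap<String, Double> igdMap) {
    String bestHeuristicIGD = "NULL";
    double bestMeanIGD = Double.POSITIVE_INFINITY;
    for (Map.Entry<String, Double> entry : igdMap.entrySet()) {
        if (entry.getValue() < bestMeanIGD) {
            bestMeanIGD = entry.getValue();
            bestHeuristicIGD = entry.getKey();
        }
    }
    return bestHeuristicIGD;
}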
From source file:edu.nyu.vida.data_polygamy.feature_identification.IndexCreation.java
/**
 * @param args
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the index and events even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option thresholdOption = new Option("t", "use-custom-thresholds", false,
            "use custom thresholds for regular and rare events, defined in HDFS_HOME/"
                    + FrameworkUtils.thresholdDir + " file");
    thresholdOption.setRequired(false);
    options.addOption(thresholdOption);

    Option gOption = new Option("g", "group", true,
            "set group of datasets for which the indices and events will be computed");
    gOption.setRequired(true);
    gOption.setArgName("GROUP");
    gOption.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(gOption);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    // the original source declared this option's long name as "aws-id" as well,
    // an apparent copy-paste slip; corrected here to "aws-key"
    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-key", true,
            "aws secret access key; this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.feature_identification.IndexCreation", options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.feature_identification.IndexCreation", options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp("hadoop jar data-polygamy.jar "
                    + "edu.nyu.vida.data_polygamy.feature_identification.IndexCreation", options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    String datasetNames = "";
    String datasetIds = "";

    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> shortDatasetIndex = new ArrayList<String>();
    HashMap<String, String> datasetAgg = new HashMap<String, String>();
    HashMap<String, String> datasetId = new HashMap<String, String>();
    HashMap<String, HashMap<Integer, Double>> datasetRegThreshold = new HashMap<String, HashMap<Integer, Double>>();
    HashMap<String, HashMap<Integer, Double>> datasetRareThreshold = new HashMap<String, HashMap<Integer, Double>>();

    Path path = null;
    FileSystem fs = FileSystem.get(new Configuration());
    BufferedReader br;

    boolean removeExistingFiles = cmd.hasOption("f");
    boolean isThresholdUserDefined = cmd.hasOption("t");

    for (String dataset : cmd.getOptionValues("g")) {

        // getting aggregates
        String[] aggregate = FrameworkUtils.searchAggregates(dataset, s3conf, s3);
        if (aggregate.length == 0) {
            System.out.println("No aggregates found for " + dataset + ".");
            continue;
        }

        // getting aggregates header
        String aggregatesHeaderFileName = FrameworkUtils.searchAggregatesHeader(dataset, s3conf, s3);
        if (aggregatesHeaderFileName == null) {
            System.out.println("No aggregate header for " + dataset);
            continue;
        }
        String aggregatesHeader = s3bucket + FrameworkUtils.preProcessingDir + "/" + aggregatesHeaderFileName;

        shortDataset.add(dataset);
        datasetId.put(dataset, null);

        if (s3) {
            path = new Path(aggregatesHeader);
            fs = FileSystem.get(path.toUri(), s3conf);
        } else {
            path = new Path(fs.getHomeDirectory() + "/" + aggregatesHeader);
        }

        br = new BufferedReader(new InputStreamReader(fs.open(path)));
        datasetAgg.put(dataset, br.readLine().split("\t")[1]);
        br.close();
        if (s3)
            fs.close();
    }

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    // getting dataset id
    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);

    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    // getting user defined thresholds
    if (isThresholdUserDefined) {
        if (s3) {
            path = new Path(s3bucket + FrameworkUtils.thresholdDir);
            fs = FileSystem.get(path.toUri(), s3conf);
        } else {
            path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.thresholdDir);
        }
        br = new BufferedReader(new InputStreamReader(fs.open(path)));
        line = br.readLine();
        while (line != null) {
            // getting dataset name
            String dataset = line.trim();
            HashMap<Integer, Double> regThresholds = new HashMap<Integer, Double>();
            HashMap<Integer, Double> rareThresholds = new HashMap<Integer, Double>();
            line = br.readLine();
            while ((line != null) && (line.split("\t").length > 1)) {
                // getting attribute ids and thresholds
                String[] keyVals = line.trim().split("\t");
                int att = Integer.parseInt(keyVals[0].trim());
                regThresholds.put(att, Double.parseDouble(keyVals[1].trim()));
                rareThresholds.put(att, Double.parseDouble(keyVals[2].trim()));
                line = br.readLine();
            }
            datasetRegThreshold.put(dataset, regThresholds);
            datasetRareThreshold.put(dataset, rareThresholds);
        }
        br.close();
    }
    if (s3)
        fs.close();

    // datasets that will use existing merge tree
    ArrayList<String> useMergeTree = new ArrayList<String>();

    // creating index for each spatio-temporal resolution
    FrameworkUtils.createDir(s3bucket + FrameworkUtils.indexDir, s3conf, s3);

    HashSet<String> input = new HashSet<String>();

    for (String dataset : shortDataset) {

        String indexCreationOutputFileName = s3bucket + FrameworkUtils.indexDir + "/" + dataset + "/";
        String mergeTreeFileName = s3bucket + FrameworkUtils.mergeTreeDir + "/" + dataset + "/";

        if (removeExistingFiles) {
            FrameworkUtils.removeFile(indexCreationOutputFileName, s3conf, s3);
            FrameworkUtils.removeFile(mergeTreeFileName, s3conf, s3);
            FrameworkUtils.createDir(mergeTreeFileName, s3conf, s3);
        } else if (datasetRegThreshold.containsKey(dataset)) {
            FrameworkUtils.removeFile(indexCreationOutputFileName, s3conf, s3);
            if (FrameworkUtils.fileExists(mergeTreeFileName, s3conf, s3)) {
                useMergeTree.add(dataset);
            }
        }

        if (!FrameworkUtils.fileExists(indexCreationOutputFileName, s3conf, s3)) {
            input.add(s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset);
            shortDatasetIndex.add(dataset);
        }
    }

    if (input.isEmpty()) {
        System.out.println("All the input datasets have indices.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    String aggregateDatasets = "";
    it = input.iterator();
    while (it.hasNext()) {
        aggregateDatasets += it.next() + ",";
    }

    Job icJob = null;
    Configuration icConf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    String jobName = "index";
    String indexOutputDir = s3bucket + FrameworkUtils.indexDir + "/tmp/";

    FrameworkUtils.removeFile(indexOutputDir, s3conf, s3);

    icConf.set("dataset-name", datasetNames);
    icConf.set("dataset-id", datasetIds);

    if (!useMergeTree.isEmpty()) {
        String useMergeTreeStr = "";
        for (String dt : useMergeTree) {
            useMergeTreeStr += dt + ",";
        }
        icConf.set("use-merge-tree", useMergeTreeStr.substring(0, useMergeTreeStr.length() - 1));
    }

    for (int i = 0; i < shortDataset.size(); i++) {
        String dataset = shortDataset.get(i);
        String id = datasetId.get(dataset);
        icConf.set("dataset-" + id + "-aggregates", datasetAgg.get(dataset));
        if (datasetRegThreshold.containsKey(dataset)) {
            HashMap<Integer, Double> regThresholds = datasetRegThreshold.get(dataset);
            String thresholds = "";
            for (int att : regThresholds.keySet()) {
                thresholds += String.valueOf(att) + "-" + String.valueOf(regThresholds.get(att)) + ",";
            }
            icConf.set("regular-" + id, thresholds.substring(0, thresholds.length() - 1));
        }
        if (datasetRareThreshold.containsKey(dataset)) {
            HashMap<Integer, Double> rareThresholds = datasetRareThreshold.get(dataset);
            String thresholds = "";
            for (int att : rareThresholds.keySet()) {
                thresholds += String.valueOf(att) + "-" + String.valueOf(rareThresholds.get(att)) + ",";
            }
            icConf.set("rare-" + id, thresholds.substring(0, thresholds.length() - 1));
        }
    }

    icConf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    icConf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    icConf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    icConf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    icConf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    icConf.set("mapreduce.task.io.sort.mb", "200");
    icConf.set("mapreduce.task.io.sort.factor", "100");
    //icConf.set("mapreduce.task.timeout", "1800000");
    machineConf.setMachineConfiguration(icConf);

    if (s3) {
        machineConf.setMachineConfiguration(icConf);
        icConf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        icConf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        icConf.set("bucket", s3bucket);
    }

    if (snappyCompression) {
        icConf.set("mapreduce.map.output.compress", "true");
        icConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        //icConf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        icConf.set("mapreduce.map.output.compress", "true");
        icConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        //icConf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    icJob = new Job(icConf);
    icJob.setJobName(jobName);

    icJob.setMapOutputKeyClass(AttributeResolutionWritable.class);
    icJob.setMapOutputValueClass(SpatioTemporalFloatWritable.class);
    icJob.setOutputKeyClass(AttributeResolutionWritable.class);
    icJob.setOutputValueClass(TopologyTimeSeriesWritable.class);
    //icJob.setOutputKeyClass(Text.class);
    //icJob.setOutputValueClass(Text.class);

    icJob.setMapperClass(IndexCreationMapper.class);
    icJob.setReducerClass(IndexCreationReducer.class);
    icJob.setNumReduceTasks(machineConf.getNumberReduces());

    icJob.setInputFormatClass(SequenceFileInputFormat.class);
    //icJob.setOutputFormatClass(SequenceFileOutputFormat.class);
    LazyOutputFormat.setOutputFormatClass(icJob, SequenceFileOutputFormat.class);
    //LazyOutputFormat.setOutputFormatClass(icJob, TextOutputFormat.class);
    SequenceFileOutputFormat.setCompressOutput(icJob, true);
    SequenceFileOutputFormat.setOutputCompressionType(icJob, CompressionType.BLOCK);

    FileInputFormat.setInputDirRecursive(icJob, true);
    FileInputFormat.setInputPaths(icJob, aggregateDatasets.substring(0, aggregateDatasets.length() - 1));
    FileOutputFormat.setOutputPath(icJob, new Path(indexOutputDir));

    icJob.setJarByClass(IndexCreation.class);

    long start = System.currentTimeMillis();
    icJob.submit();
    icJob.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (String dataset : shortDatasetIndex) {
        String from = s3bucket + FrameworkUtils.indexDir + "/tmp/" + dataset + "/";
        String to = s3bucket + FrameworkUtils.indexDir + "/" + dataset + "/";
        FrameworkUtils.renameFile(from, to, s3conf, s3);
    }
}
From source file:Main.java
public static Object keyString(HashMap<String, String> map, String o) {
    Iterator<String> it = map.keySet().iterator();
    while (it.hasNext()) {
        Object keyString = it.next();
        if (map.get(keyString).equals(o))
            return keyString;
    }
    return null;
}
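A hypothetical call (map contents invented). With duplicate values the method returns whichever matching key the iterator reaches first, since HashMap iteration order is unspecified:

HashMap<String, String> codes = new HashMap<>();
codes.put("DE", "Germany");
codes.put("FR", "France");
Object key = keyString(codes, "France"); // "FR"
Object missing = keyString(codes, "Spain"); // null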
From source file:Main.java
/**
 * Finds the difference between two HashMaps a and b.
 * Returns a HashMap containing elements in b that are not in a.
 *
 * @return the difference as a HashMap
 */
public static HashMap hashMapDifference(HashMap a, HashMap b) {
    Iterator bKeyIterator = b.keySet().iterator();
    Object key;
    Object value;
    HashMap difference = new HashMap();
    while (bKeyIterator.hasNext()) {
        key = bKeyIterator.next();
        if (!a.containsKey(key)) {
            value = b.get(key);
            difference.put(key, value);
        }
    }
    return difference;
}
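A hypothetical call (contents invented). Note the comparison is by key presence only, so a key present in both maps is skipped even when its values differ:

HashMap a = new HashMap();
a.put("a", 1);
a.put("b", 2);
HashMap b = new HashMap();
b.put("b", 99); // key exists in a, so it is ignored even though the value differs
b.put("c", 3);
HashMap diff = hashMapDifference(a, b); // {c=3}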
From source file:Main.java
/**
 * The security value is the sum of deviations between the distance
 * of one specific place and all the other ones in the placeDistanceMap.
 *
 * @param distanceAtOnePlace the distance for one specific place
 * @param placeDistanceMap   the place distance map that the single place gets compared to
 * @return the double value of the sum of deviation, also called "security value"
 */
public static double securityValue(double distanceAtOnePlace, HashMap<String, Double> placeDistanceMap) {
    double sumDeviation = 0;
    for (String key : placeDistanceMap.keySet()) {
        sumDeviation += Math.abs(distanceAtOnePlace - placeDistanceMap.get(key));
    }
    return sumDeviation;
}
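A made-up call to illustrate the arithmetic: with recorded distances 2.0, 4.0, and 7.0, a place at distance 5.0 scores |5-2| + |5-4| + |5-7| = 6.0:

HashMap<String, Double> distances = new HashMap<>();
distances.put("station", 2.0);
distances.put("market", 4.0);
distances.put("school", 7.0);
double value = securityValue(5.0, distances); // 3.0 + 1.0 + 2.0 = 6.0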
From source file:Main.java
static String generateURLForm(HashMap<String, String> data) throws UnsupportedEncodingException {
    Set<String> keys = data.keySet();
    Iterator<String> keyIterator = keys.iterator();
    String content = "";
    for (int i = 0; keyIterator.hasNext(); i++) {
        String key = keyIterator.next();
        if (i != 0) {
            content += "&";
        }
        content += key + "=" + URLEncoder.encode(data.get(key), "UTF-8");
    }
    return content;
}
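A hypothetical call (parameters invented). Pair order follows HashMap iteration order, which is unspecified, so the pairs may come out in either order:

HashMap<String, String> params = new HashMap<>();
params.put("q", "java hashmap");
params.put("lang", "en");
String body = generateURLForm(params);
// e.g. "q=java+hashmap&lang=en" (order may differ)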
From source file:Main.java
@SuppressWarnings("unchecked") static public LinkedHashMap<Object, Comparable> sortHashMapByValues(HashMap<Object, Comparable> passedMap) { ArrayList mapKeys = new ArrayList(passedMap.keySet()); ArrayList mapValues = new ArrayList(passedMap.values()); Collections.sort(mapValues);/*from w w w . j av a2 s. c o m*/ Collections.sort(mapKeys); LinkedHashMap<Object, Comparable> sortedMap = new LinkedHashMap<Object, Comparable>(); Iterator<Comparable> valueIt = mapValues.iterator(); while (valueIt.hasNext()) { Comparable val = valueIt.next(); Iterator keyIt = mapKeys.iterator(); while (keyIt.hasNext()) { Object key = keyIt.next(); Comparable comp = passedMap.get(key); if (comp.equals(val)) { passedMap.remove(key); mapKeys.remove(key); sortedMap.put(key, val); break; } } } return sortedMap; }
From source file:Main.java
private static HashMap<Date, Date> getBookedTimeInOneDate(HashMap<Date, Date> bookedTimes, Date date) {
    HashMap<Date, Date> result = new HashMap<>();
    for (Date key : bookedTimes.keySet()) {
        if (isSameDay(key, date)) {
            result.put(key, bookedTimes.get(key));
        }
    }
    return result;
}
From source file:net.sourceforge.fenixedu.presentationTier.validator.form.ValidateMultiRadioSelect.java
/**
 * Verifies that the hashmap, which contains the unique multi-radio button
 * select, is not empty and has no empty fields; by default all key entries
 * must exist and start out empty.
 *
 * @param bean
 * @param va
 * @param field
 * @param errors
 * @param request
 * @return
 */
public static boolean validate(Object bean, ValidatorAction va, Field field, ActionMessages errors,
        HttpServletRequest request, ServletContext application) {
    DynaActionForm form = (DynaActionForm) bean;
    HashMap hashMap = (HashMap) form.get(field.getProperty());

    if (hashMap.keySet().size() == 0) {
        errors.add(field.getKey(), Resources.getActionMessage(request, va, field));
        return false;
    }

    Iterator iterator = hashMap.keySet().iterator();
    while (iterator.hasNext()) {
        String key = (String) iterator.next();
        if (hashMap.get(key).equals("")) {
            errors.add(field.getKey(), Resources.getActionMessage(request, va, field));
            return false;
        }
    }
    return true;
}