Usage examples for java.util.HashMap.put

Method signature: public V put(K key, V value)

put associates the given value with the given key, replacing any value previously mapped to that key; it returns the previous value, or null if the key had no mapping (or was explicitly mapped to null). The examples below are taken from real projects, each exercising put in a different context.
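Before the project examples, a minimal self-contained sketch of put's return-value contract (class name and map contents are made up for illustration):

import java.util.HashMap;

public class PutDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> ages = new HashMap<>();

        // put returns null when the key was not previously mapped
        Integer previous = ages.put("alice", 30);   // previous == null

        // putting an existing key replaces the value and returns the old one
        previous = ages.put("alice", 31);           // previous == 30

        System.out.println(ages.get("alice"));      // 31
    }
}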
From source file:com.thed.zapi.cloud.sample.CycleExecutionReportByVersion.java
public static void main(String[] args)
        throws JSONException, URISyntaxException, ParseException, IOException {
    String API_GET_EXECUTIONS = "{SERVER}/public/rest/api/1.0/executions/search/cycle/";
    String API_GET_CYCLES = "{SERVER}/public/rest/api/1.0/cycles/search?";
    // Delimiter used in CSV file
    final String NEW_LINE_SEPARATOR = "\n";
    final String fileName = "F:\\cycleExecutionReport.csv";

    // Declare JIRA, Zephyr URL, access and secret keys
    // JIRA Cloud URL of the instance
    String jiraBaseURL = "https://demo.atlassian.net";
    // Replace zephyr baseurl <ZAPI_Cloud_URL> shared with the user for ZAPI Cloud installation
    String zephyrBaseUrl = "<ZAPI_Cloud_URL>";
    // zephyr accessKey, available from Addons >> zapi section
    String accessKey = "YjE2MjdjMGEtNzExNy0zYjY1LWFkMzQtNjcwMDM3OTljFkbWluIGFkbWlu";
    // zephyr secretKey, available from Addons >> zapi section
    String secretKey = "qufnbimi96Ob2hq3ISF08yZ8Qw4c1eHGeGlk";

    // Declare parameter values here
    String userName = "admin";
    String versionId = "-1";
    String projectId = "10100";
    String projectName = "Support";
    String versionName = "Unscheduled";

    ZFJCloudRestClient client = ZFJCloudRestClient
            .restBuilder(zephyrBaseUrl, accessKey, secretKey, userName).build();

    // Get list of cycles by project and version
    final String getCyclesUri = API_GET_CYCLES.replace("{SERVER}", zephyrBaseUrl)
            + "projectId=" + projectId + "&versionId=" + versionId;
    Map<String, String> cycles = getCyclesByProjectVersion(getCyclesUri, client, accessKey);

    // Iterate over the cycles and write the report to CSV
    FileWriter fileWriter = null;
    System.out.println("Writing CSV file.....");
    try {
        fileWriter = new FileWriter(fileName);
        // Write the CSV file header
        fileWriter.append("Cycle Execution Report By Version and Project");
        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("PROJECT:" + "," + projectName);
        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("VERSION:" + "," + versionName);
        fileWriter.append(NEW_LINE_SEPARATOR);

        JSONArray executions;
        int totalUnexecutedCount = 0;
        int totalExecutionCount = 0;

        for (String key : cycles.keySet()) {
            int executionCount = 0;
            int unexecutedCount = 0;
            final String getExecutionsUri = API_GET_EXECUTIONS.replace("{SERVER}", zephyrBaseUrl)
                    + key + "?projectId=" + projectId + "&versionId=" + versionId;
            fileWriter.append("Cycle:" + "," + cycles.get(key));
            fileWriter.append(NEW_LINE_SEPARATOR);
            executions = getExecutionsByCycleId(getExecutionsUri, client, accessKey);

            HashMap<String, Integer> counter = new HashMap<String, Integer>();
            String[] statusName = new String[executions.length()];
            for (int i = 0; i < executions.length(); i++) {
                JSONObject executionObj = executions.getJSONObject(i).getJSONObject("execution");
                JSONObject statusObj = executionObj.getJSONObject("status");
                statusName[i] = statusObj.getString("name");
            }

            if (statusName.length != 0) {
                // Count occurrences of each execution status
                for (String a : statusName) {
                    if (counter.containsKey(a)) {
                        int oldValue = counter.get(a);
                        counter.put(a, oldValue + 1);
                    } else {
                        counter.put(a, 1);
                    }
                }
                for (String status : counter.keySet()) {
                    fileWriter.append(" " + "," + " " + "," + status + "," + counter.get(status));
                    fileWriter.append(NEW_LINE_SEPARATOR);
                    if (status.equalsIgnoreCase("UNEXECUTED")) {
                        unexecutedCount += counter.get(status);
                    } else {
                        executionCount += counter.get(status);
                    }
                }
            }
            totalExecutionCount += executionCount;
            totalUnexecutedCount += unexecutedCount;
            fileWriter.append(NEW_LINE_SEPARATOR);
        }

        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("TOTAL CYCLES:" + "," + cycles.size());
        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("TOTAL EXECUTIONS:" + "," + totalExecutionCount);
        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("TOTAL ASSIGNED:" + "," + (totalUnexecutedCount + totalExecutionCount));
        System.out.println("CSV file was created successfully !!!");
    } catch (Exception e) {
        System.out.println("Error in CsvFileWriter !!!");
        e.printStackTrace();
    } finally {
        try {
            // guard against an NPE when the FileWriter constructor itself failed
            if (fileWriter != null) {
                fileWriter.flush();
                fileWriter.close();
            }
        } catch (IOException e) {
            System.out.println("Error while flushing/closing fileWriter !!!");
            e.printStackTrace();
        }
    }
}
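The containsKey/get/put loop above is the classic pre-Java-8 counting idiom. Since Java 8 the same step can be written with HashMap.merge; a minimal sketch with stand-in status values:

import java.util.HashMap;

public class StatusCounter {
    public static void main(String[] args) {
        // stand-in data; in the example above this comes from the executions JSON
        String[] statusNames = { "PASS", "FAIL", "UNEXECUTED", "PASS" };

        HashMap<String, Integer> counter = new HashMap<>();
        for (String status : statusNames) {
            counter.merge(status, 1, Integer::sum); // insert 1, or add 1 to the existing count
        }
        System.out.println(counter); // {UNEXECUTED=1, FAIL=1, PASS=2} (iteration order not guaranteed)
    }
}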
From source file:DIA_Umpire_Quant.DIA_Umpire_IntLibSearch.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
    System.out.println(
            "=================================================================================================");
    System.out.println("DIA-Umpire targeted re-extraction analysis using internal library (version: "
            + UmpireInfo.GetInstance().Version + ")");
    if (args.length != 1) {
        System.out.println("command format error, the correct format should be : "
                + "java -jar -Xmx10G DIA_Umpire_IntLibSearch.jar diaumpire_module.params");
        return;
    }
    try {
        ConsoleLogger.SetConsoleLogger(Level.INFO);
        ConsoleLogger.SetFileLogger(Level.DEBUG,
                FilenameUtils.getFullPath(args[0]) + "diaumpire_intlibsearch.log");
    } catch (Exception e) {
        // logger setup failures are ignored
    }

    Logger.getRootLogger().info("Version: " + UmpireInfo.GetInstance().Version);
    Logger.getRootLogger().info("Parameter file:" + args[0]);

    BufferedReader reader = new BufferedReader(new FileReader(args[0]));
    String line = "";
    String WorkFolder = "";
    int NoCPUs = 2;
    String InternalLibID = "";
    float ProbThreshold = 0.99f;
    float RTWindow_Int = -1f;
    float Freq = 0f;
    int TopNFrag = 6;
    TandemParam tandemPara = new TandemParam(DBSearchParam.SearchInstrumentType.TOF5600);
    HashMap<String, File> AssignFiles = new HashMap<>();

    //<editor-fold defaultstate="collapsed" desc="Reading parameter file">
    while ((line = reader.readLine()) != null) {
        line = line.trim();
        Logger.getRootLogger().info(line);
        if (!"".equals(line) && !line.startsWith("#")) {
            if (line.equals("==File list begin")) {
                do {
                    line = reader.readLine();
                    line = line.trim();
                    if (line.equals("==File list end")) {
                        continue;
                    } else if (!"".equals(line)) {
                        File newfile = new File(line);
                        if (newfile.exists()) {
                            AssignFiles.put(newfile.getAbsolutePath(), newfile);
                        } else {
                            Logger.getRootLogger().info("File: " + newfile + " does not exist.");
                        }
                    }
                } while (!line.equals("==File list end"));
            }
            if (line.split("=").length < 2) {
                continue;
            }
            String type = line.split("=")[0].trim();
            String value = line.split("=")[1].trim();
            switch (type) {
                case "Path": { WorkFolder = value; break; }
                case "path": { WorkFolder = value; break; }
                case "Thread": { NoCPUs = Integer.parseInt(value); break; }
                case "InternalLibID": { InternalLibID = value; break; }
                case "RTWindow_Int": { RTWindow_Int = Float.parseFloat(value); break; }
                case "ProbThreshold": { ProbThreshold = Float.parseFloat(value); break; }
                case "TopNFrag": { TopNFrag = Integer.parseInt(value); break; }
                case "Freq": { Freq = Float.parseFloat(value); break; }
                case "Fasta": { tandemPara.FastaPath = value; break; }
            }
        }
    }
    //</editor-fold>

    // Initialize PTM manager using compomics library
    PTMManager.GetInstance();

    // Check if the fasta file can be found
    if (!new File(tandemPara.FastaPath).exists()) {
        Logger.getRootLogger().info("Fasta file :" + tandemPara.FastaPath
                + " cannot be found, the process will be terminated, please check.");
        System.exit(1);
    }

    // Generate DIA file list
    ArrayList<DIAPack> FileList = new ArrayList<>();
    try {
        File folder = new File(WorkFolder);
        if (!folder.exists()) {
            Logger.getRootLogger().info("The path : " + WorkFolder + " cannot be found.");
            System.exit(1);
        }
        for (final File fileEntry : folder.listFiles()) {
            if (fileEntry.isFile()
                    && (fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzxml")
                            || fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzml"))
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q1.mzxml")
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q2.mzxml")
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) {
                AssignFiles.put(fileEntry.getAbsolutePath(), fileEntry);
            }
            if (fileEntry.isDirectory()) {
                for (final File fileEntry2 : fileEntry.listFiles()) {
                    if (fileEntry2.isFile()
                            && (fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzxml")
                                    || fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzml"))
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q1.mzxml")
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q2.mzxml")
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) {
                        AssignFiles.put(fileEntry2.getAbsolutePath(), fileEntry2);
                    }
                }
            }
        }
        Logger.getRootLogger().info("No. of files assigned :" + AssignFiles.size());
        for (File fileEntry : AssignFiles.values()) {
            Logger.getRootLogger().info(fileEntry.getAbsolutePath());
        }
        for (File fileEntry : AssignFiles.values()) {
            String mzXMLFile = fileEntry.getAbsolutePath();
            if (mzXMLFile.toLowerCase().endsWith(".mzxml") || mzXMLFile.toLowerCase().endsWith(".mzml")) {
                DIAPack DiaFile = new DIAPack(mzXMLFile, NoCPUs);
                Logger.getRootLogger().info(
                        "=================================================================================================");
                Logger.getRootLogger().info("Processing " + mzXMLFile);
                if (!DiaFile.LoadDIASetting()) {
                    Logger.getRootLogger().info("Loading DIA setting failed, job is incomplete");
                    System.exit(1);
                }
                if (!DiaFile.LoadParams()) {
                    Logger.getRootLogger().info("Loading parameters failed, job is incomplete");
                    System.exit(1);
                }
                Logger.getRootLogger().info("Loading identification results " + mzXMLFile + "....");
                // If the serialization file for the ID file exists
                if (DiaFile.ReadSerializedLCMSID()) {
                    DiaFile.IDsummary.ReduceMemoryUsage();
                    DiaFile.IDsummary.FastaPath = tandemPara.FastaPath;
                    FileList.add(DiaFile);
                }
            }
        }

        //<editor-fold defaultstate="collapsed" desc="Targeted re-extraction using internal library">
        Logger.getRootLogger().info(
                "=================================================================================================");
        if (FileList.size() > 1) {
            Logger.getRootLogger().info("Targeted re-extraction using internal library");
            FragmentLibManager libManager =
                    FragmentLibManager.ReadFragmentLibSerialization(WorkFolder, InternalLibID);
            if (libManager == null) {
                Logger.getRootLogger().info("Building internal spectral library");
                libManager = new FragmentLibManager(InternalLibID);
                ArrayList<LCMSID> LCMSIDList = new ArrayList<>();
                for (DIAPack dia : FileList) {
                    LCMSIDList.add(dia.IDsummary);
                }
                libManager.ImportFragLibTopFrag(LCMSIDList, Freq, TopNFrag);
                libManager.WriteFragmentLibSerialization(WorkFolder);
            }
            libManager.ReduceMemoryUsage();
            Logger.getRootLogger()
                    .info("Building retention time prediction model and generate candidate peptide list");
            for (int i = 0; i < FileList.size(); i++) {
                FileList.get(i).IDsummary.ClearMappedPep();
            }
            for (int i = 0; i < FileList.size(); i++) {
                for (int j = i + 1; j < FileList.size(); j++) {
                    RTAlignedPepIonMapping alignment = new RTAlignedPepIonMapping(WorkFolder,
                            FileList.get(i).GetParameter(), FileList.get(i).IDsummary,
                            FileList.get(j).IDsummary);
                    alignment.GenerateModel();
                    alignment.GenerateMappedPepIon();
                }
                FileList.get(i).ExportID();
                FileList.get(i).IDsummary = null;
            }
            Logger.getRootLogger().info("Targeted matching........");
            for (DIAPack diafile : FileList) {
                if (diafile.IDsummary == null) {
                    diafile.ReadSerializedLCMSID();
                }
                if (!diafile.IDsummary.GetMappedPepIonList().isEmpty()) {
                    diafile.UseMappedIon = true;
                    diafile.FilterMappedIonByProb = false;
                    diafile.BuildStructure();
                    diafile.MS1FeatureMap.ReadPeakCluster();
                    diafile.MS1FeatureMap.ClearMonoisotopicPeakOfCluster();
                    diafile.GenerateMassCalibrationRTMap();
                    diafile.TargetedExtractionQuant(false, libManager, ProbThreshold, RTWindow_Int);
                    diafile.MS1FeatureMap.ClearAllPeaks();
                    diafile.IDsummary.ReduceMemoryUsage();
                    diafile.IDsummary.RemoveLowProbMappedIon(ProbThreshold);
                    diafile.ExportID();
                    Logger.getRootLogger().info("Peptide ions: " + diafile.IDsummary.GetPepIonList().size()
                            + " Mapped ions: " + diafile.IDsummary.GetMappedPepIonList().size());
                    diafile.ClearStructure();
                }
                diafile.IDsummary = null;
                System.gc();
            }
            Logger.getRootLogger().info(
                    "=================================================================================================");
        }
        //</editor-fold>

        Logger.getRootLogger().info("Job done");
        Logger.getRootLogger().info(
                "=================================================================================================");
    } catch (Exception e) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(e));
        throw e;
    }
}
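Note how AssignFiles is keyed by absolute path: putting a file that was already discovered (for example once from the file list and once from the directory scan) simply replaces the existing entry, so the map deduplicates for free. A minimal sketch of that behavior, with made-up file names:

import java.io.File;
import java.util.HashMap;

public class DedupeByPath {
    public static void main(String[] args) {
        HashMap<String, File> assigned = new HashMap<>();
        File a = new File("data/run1.mzXML");

        // Putting the same key twice keeps a single entry; the second put
        // just replaces the value (here, an equivalent File object).
        assigned.put(a.getAbsolutePath(), a);
        assigned.put(a.getAbsolutePath(), new File("data/run1.mzXML"));

        System.out.println(assigned.size()); // 1
    }
}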
From source file:licenseUtil.LicenseUtil.java
public static void main(String[] args) throws IOException, IncompleteLicenseObjectException {
    if (args.length == 0) {
        logger.error("Missing parameters. Use --help to get a list of the possible options.");
    } else if (args[0].equals("--addPomToTsv")) {
        if (args.length < 4)
            logger.error("Missing arguments for option --addPomToTsv. Please specify "
                    + "<pomFileName> <licenses.stub.tsv> <currentVersion> or use the option --help "
                    + "for further information.");
        String pomFN = args[1];
        String spreadSheetFN = args[2];
        String currentVersion = args[3];
        MavenProject project = null;
        try {
            project = Utils.readPom(new File(pomFN));
        } catch (XmlPullParserException e) {
            logger.error("Could not parse pom file: \"" + pomFN + "\"");
        }
        LicensingList licensingList = new LicensingList();
        File f = new File(spreadSheetFN);
        if (f.exists() && !f.isDirectory()) {
            licensingList.readFromSpreadsheet(spreadSheetFN);
        }
        licensingList.addMavenProject(project, currentVersion);
        licensingList.writeToSpreadsheet(spreadSheetFN);
    } else if (args[0].equals("--writeLicense3rdParty")) {
        if (args.length < 4)
            logger.error("Missing arguments for option --writeLicense3rdParty. Please provide "
                    + "<licenses.enhanced.tsv> <processModule> <currentVersion> [and <targetDir>] "
                    + "or use the option --help for further information.");
        String spreadSheetFN = args[1];
        String processModule = args[2];
        String currentVersion = args[3];
        HashMap<String, String> targetDirs = new HashMap<>();
        if (args.length > 4) {
            File targetDir = new File(args[4]);
            logger.info("scan pom files in direct subdirectories of \"" + targetDir.getPath()
                    + "\" to obtain target locations for 3rd party license files...");
            File[] subdirs = targetDir.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY);
            for (File subdir : subdirs) {
                File pomFile = new File(subdir.getPath() + File.separator + POM_FN);
                if (!pomFile.exists())
                    continue;
                MavenProject mavenProject;
                try {
                    mavenProject = Utils.readPom(pomFile);
                } catch (Exception e) {
                    logger.warn("Could not read from pom file: \"" + pomFile.getPath()
                            + "\" because of " + e.getMessage());
                    continue;
                }
                targetDirs.put(mavenProject.getModel().getArtifactId(), subdir.getAbsolutePath());
            }
        }
        LicensingList licensingList = new LicensingList();
        licensingList.readFromSpreadsheet(spreadSheetFN);
        if (processModule.toUpperCase().equals("ALL")) {
            for (String module : licensingList.getNonFixedHeaders()) {
                try {
                    writeLicense3rdPartyFile(module, licensingList, currentVersion, targetDirs.get(module));
                } catch (NoLicenseTemplateSetException e) {
                    logger.error("Could not write file for module \"" + module
                            + "\". There is no template specified for \"" + e.getLicensingObject()
                            + "\". Please add an existing template filename to the column \""
                            + LicensingObject.ColumnHeader.LICENSE_TEMPLATE.value() + "\" of \""
                            + spreadSheetFN + "\".");
                }
            }
        } else {
            try {
                writeLicense3rdPartyFile(processModule, licensingList, currentVersion,
                        targetDirs.get(processModule));
            } catch (NoLicenseTemplateSetException e) {
                logger.error("Could not write file for module \"" + processModule
                        + "\". There is no template specified for \"" + e.getLicensingObject()
                        + "\". Please add an existing template filename to the column \""
                        + LicensingObject.ColumnHeader.LICENSE_TEMPLATE.value() + "\" of \""
                        + spreadSheetFN + "\".");
            }
        }
    } else if (args[0].equals("--buildEffectivePom")) {
        Utils.writeEffectivePom(new File(args[1]), (new File(EFFECTIVE_POM_FN)).getAbsolutePath());
    } else if (args[0].equals("--updateTsvWithProjectsInFolder")) {
        if (args.length < 4)
            logger.error("Missing arguments for option --processProjectsInFolder. Please provide "
                    + "<superDirectory> <licenses.stub.tsv> and <currentVersion> or use the option "
                    + "--help for further information.");
        File directory = new File(args[1]);
        String spreadSheetFN = args[2];
        String currentVersion = args[3];
        LicensingList licensingList = new LicensingList();
        File f = new File(spreadSheetFN);
        if (f.exists() && !f.isDirectory()) {
            licensingList.readFromSpreadsheet(spreadSheetFN);
        }
        licensingList.addAll(processProjectsInFolder(directory, currentVersion, false));
        licensingList.writeToSpreadsheet(spreadSheetFN);
    } else if (args[0].equals("--purgeTsv")) {
        if (args.length < 3)
            logger.error("Missing arguments for option --purgeTsv. Please provide "
                    + "<spreadSheetIN.tsv>, <spreadSheetOUT.tsv> and <currentVersion> or use the "
                    + "option --help for further information.");
        String spreadSheetIN = args[1];
        String spreadSheetOUT = args[2];
        String currentVersion = args[3];
        LicensingList licensingList = new LicensingList();
        licensingList.readFromSpreadsheet(spreadSheetIN);
        licensingList.purge(currentVersion);
        licensingList.writeToSpreadsheet(spreadSheetOUT);
    } else if (args[0].equals("--help")) {
        InputStream in = LicenseUtil.class.getClassLoader().getResourceAsStream(README_PATH);
        BufferedReader reader = new BufferedReader(new InputStreamReader(in));
        String line;
        while ((line = reader.readLine()) != null) {
            System.out.println(line);
        }
    } else {
        logger.error("Unknown parameter: " + args[0] + ". Use --help to get a list of the possible options.");
    }
}
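targetDirs.get(module) above returns null for modules without a matching subdirectory, and that null is passed straight on to writeLicense3rdPartyFile. A minimal sketch of this null-on-miss behavior, with hypothetical module names, plus the getOrDefault alternative when a fallback is wanted:

import java.util.HashMap;

public class TargetDirLookup {
    public static void main(String[] args) {
        HashMap<String, String> targetDirs = new HashMap<>();
        targetDirs.put("module-a", "/repo/module-a");

        // get returns null for unknown keys, which the caller above passes through
        System.out.println(targetDirs.get("module-a"));            // /repo/module-a
        System.out.println(targetDirs.get("module-b"));            // null
        System.out.println(targetDirs.getOrDefault("module-b", ".")); // fallback variant
    }
}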
From source file:net.java.sen.tools.MkCompoundTable.java
/**
 * Build compound word table.
 */
public static void main(String[] args) {
    ResourceBundle rb = ResourceBundle.getBundle("dictionary");
    int pos_start = Integer.parseInt(rb.getString("pos_start"));
    int pos_size = Integer.parseInt(rb.getString("pos_size"));
    try {
        log.info("reading compound word information ... ");
        HashMap<String, String> compoundTable = new HashMap<String, String>();
        log.info("load dic: " + rb.getString("compound_word_file"));
        BufferedReader dicStream = new BufferedReader(new InputStreamReader(
                new FileInputStream(rb.getString("compound_word_file")), rb.getString("dic.charset")));
        String t;
        int line = 0;
        StringBuffer pos_b = new StringBuffer();
        while ((t = dicStream.readLine()) != null) {
            CSVParser parser = new CSVParser(t);
            String[] csv = parser.nextTokens();
            if (csv.length < (pos_size + pos_start)) {
                throw new RuntimeException("format error:" + line);
            }
            // build the part-of-speech key as a comma-separated string
            pos_b.setLength(0);
            for (int i = pos_start; i < (pos_start + pos_size - 1); i++) {
                pos_b.append(csv[i]);
                pos_b.append(',');
            }
            pos_b.append(csv[pos_start + pos_size - 1]);
            pos_b.append(',');
            for (int i = pos_start + pos_size; i < (csv.length - 2); i++) {
                pos_b.append(csv[i]);
                pos_b.append(',');
            }
            pos_b.append(csv[csv.length - 2]);
            compoundTable.put(pos_b.toString(), csv[csv.length - 1]);
        }
        dicStream.close();
        log.info("done.");
        log.info("writing compound word table ... ");
        ObjectOutputStream os = new ObjectOutputStream(
                new FileOutputStream(rb.getString("compound_word_table")));
        os.writeObject(compoundTable);
        os.close();
        log.info("done.");
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }
}
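Writing the table with a single writeObject call works because HashMap implements java.io.Serializable. A minimal round-trip sketch (file name and entry are made up):

import java.io.*;
import java.util.HashMap;

public class TableRoundTrip {
    public static void main(String[] args) throws Exception {
        HashMap<String, String> table = new HashMap<>();
        table.put("noun,common,*", "compound");

        // HashMap is Serializable, so the whole table is written in one call
        try (ObjectOutputStream os = new ObjectOutputStream(new FileOutputStream("table.ser"))) {
            os.writeObject(table);
        }
        try (ObjectInputStream is = new ObjectInputStream(new FileInputStream("table.ser"))) {
            @SuppressWarnings("unchecked")
            HashMap<String, String> loaded = (HashMap<String, String>) is.readObject();
            System.out.println(loaded.get("noun,common,*")); // compound
        }
    }
}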
From source file:TwitterClustering.java
public static void main(String[] args) throws FileNotFoundException, IOException {
    File outFile = new File(args[3]);
    Scanner s = new Scanner(new File(args[1])).useDelimiter(",");
    JSONParser parser = new JSONParser();
    Set<Cluster> clusterSet = new HashSet<Cluster>();
    HashMap<String, Tweet> tweets = new HashMap<String, Tweet>();
    FileWriter fw = new FileWriter(outFile.getAbsoluteFile());
    BufferedWriter bw = new BufferedWriter(fw);

    try {
        // init: index every tweet by its id
        Object obj = parser.parse(new FileReader(args[2]));
        JSONArray jsonArray = (JSONArray) obj;
        for (int i = 0; i < jsonArray.size(); i++) {
            Tweet twt = new Tweet();
            JSONObject jObj = (JSONObject) jsonArray.get(i);
            String text = jObj.get("text").toString();
            long sum = 0;
            for (int y = 0; y < text.toCharArray().length; y++) {
                sum += (int) text.toCharArray()[y];
            }
            String[] token = text.split(" ");
            String tID = jObj.get("id").toString();
            Set<String> mySet = new HashSet<String>(Arrays.asList(token));
            twt.setAttributeValue(sum);
            twt.setText(mySet);
            twt.setTweetID(tID);
            tweets.put(tID, twt);
        }

        // preparing initial clusters
        int i = 0;
        while (s.hasNext()) {
            String id = s.next(); // id
            Tweet t = tweets.get(id.trim());
            clusterSet.add(new Cluster(i + 1, t, new LinkedList()));
            i++;
        }

        for (int l = 0; l < 2; l++) { // reassignment passes: assign tweets, then recompute centroids
            // a fresh iterator is needed on every pass; a single iterator would be
            // exhausted after the first pass and later passes would see nothing
            Iterator it = tweets.entrySet().iterator();
            while (it.hasNext()) {
                Map.Entry me = (Map.Entry) it.next();
                // calculate distance to each centroid
                Tweet p = (Tweet) me.getValue();
                HashMap<Cluster, Float> distMap = new HashMap<Cluster, Float>();
                for (Cluster clust : clusterSet) {
                    distMap.put(clust, jaccardDistance(p.getText(), clust.getCentroid().getText()));
                }
                HashMap<Cluster, Float> sorted = (HashMap<Cluster, Float>) sortByValue(distMap);
                sorted.keySet().iterator().next().getMembers().add(p);
            }

            // calculate new centroid and update the cluster set
            for (Cluster clust : clusterSet) {
                TreeMap<String, Long> tDistMap = new TreeMap<String, Long>();
                Tweet newCentroid = null;
                Long avgSumDist = 0L;
                for (int j = 0; j < clust.getMembers().size(); j++) {
                    avgSumDist += clust.getMembers().get(j).getAttributeValue();
                    tDistMap.put(clust.getMembers().get(j).getTweetID(),
                            clust.getMembers().get(j).getAttributeValue());
                }
                if (clust.getMembers().size() != 0) {
                    avgSumDist /= (clust.getMembers().size());
                }
                ArrayList<Long> listValues = new ArrayList<Long>(tDistMap.values());
                if (tDistMap.containsValue(findClosestNumber(listValues, avgSumDist))) {
                    // found closest
                    newCentroid = tweets
                            .get(getKeyByValue(tDistMap, findClosestNumber(listValues, avgSumDist)));
                    clust.setCentroid(newCentroid);
                }
            }
        }

        // write the resulting clusters
        Iterator iterator = clusterSet.iterator();
        while (iterator.hasNext()) {
            Cluster c = (Cluster) iterator.next();
            bw.write(c.getId() + "\t");
            System.out.print(c.getId() + "\t");
            for (Tweet t : c.getMembers()) {
                bw.write(t.getTweetID() + ", ");
                System.out.print(t.getTweetID() + ",");
            }
            bw.write("\n");
            System.out.println("");
        }
        System.out.println("");
        System.out.println("SSE " + sumSquaredErrror(clusterSet));
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        bw.close();
        fw.close();
    }
}
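In the distance step above, each Cluster object is used as a HashMap key; assuming Cluster does not override equals/hashCode, every instance is its own key, which is exactly what a per-centroid distance table needs. Sorting the whole map just to take the first key is more work than necessary, though; a minimal sketch that fills a distance map the same way and picks the minimum entry in one pass (string keys stand in for Cluster objects, distances are made up):

import java.util.HashMap;
import java.util.Map;

public class NearestCentroid {
    public static void main(String[] args) {
        HashMap<String, Float> distMap = new HashMap<>();
        distMap.put("cluster-1", 0.8f);
        distMap.put("cluster-2", 0.3f);
        distMap.put("cluster-3", 0.5f);

        // single linear scan instead of sorting all entries by value
        String nearest = null;
        float best = Float.POSITIVE_INFINITY;
        for (Map.Entry<String, Float> e : distMap.entrySet()) {
            if (e.getValue() < best) {
                best = e.getValue();
                nearest = e.getKey();
            }
        }
        System.out.println(nearest); // cluster-2
    }
}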
From source file:at.asitplus.regkassen.demo.RKSVCashboxSimulator.java
public static void main(String[] args) {
    try {
        //IMPORTANT HINT REGARDING STRING ENCODING
        //Java Strings are Unicode internally, so encoding only matters where bytes are read or written;
        //there are therefore only a few references to UTF-8 encoding in this demo code.
        //However, if values are retrieved from a database or another programming language is used,
        //one needs to make sure that UTF-8 encoding is correctly implemented.

        //This demo cashbox does not implement error handling. It should only demonstrate the core
        //elements of the RKSV, and any boilerplate code is avoided as much as possible.
        //If an error occurs, only the stacktraces are logged.
        //Obviously this needs to be adapted in a productive cashbox.

        //basic inits: add bouncycastle provider
        Security.addProvider(new BouncyCastleProvider());

        //check if unlimited strength policy files are installed; they are required for strong crypto algorithms ==> AES 256
        if (!CryptoUtil.isUnlimitedStrengthPolicyAvailable()) {
            System.out.println("Your JVM does not provide the unlimited strength policy. However, this policy is "
                    + "required to enable strong cryptography (e.g. AES with 256 bits). Please install the required policy files.");
            System.exit(0);
        }

        //parse cmd line options
        Options options = new Options();
        options.addOption("o", "output-dir", true,
                "specify base output directory, if none is specified, a new directory will be created in the current working directory");
        options.addOption("v", "verbose", false, "dump demo receipts to cmd line");
        options.addOption("c", "closed system", false, "simulate closed system");

        CommandLineParser parser = new DefaultParser();
        CommandLine cmd = parser.parse(options, args);

        //setup inputs from cmd line
        VERBOSE = cmd.hasOption("v");
        CLOSED_SYSTEM = cmd.hasOption("c");

        //output directory
        String outputParentDirectoryString = cmd.getOptionValue("o");
        if (outputParentDirectoryString == null) {
            DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss");
            outputParentDirectoryString = "./CashBoxDemoOutput" + df.format(new Date());
        }
        File OUTPUT_PARENT_DIRECTORY = new File(outputParentDirectoryString);
        OUTPUT_PARENT_DIRECTORY.mkdirs();

        //external simulation runs are not implemented yet (handling of arbitrary input simulation
        //files is planned for 0.7); currently only the internal test suites can be executed
        List<CashBoxSimulation> cashBoxSimulationList = TestSuiteGenerator.getSimulationRuns();

        //setup simulation and execute
        int index = 1;
        for (CashBoxSimulation cashboxSimulation : cashBoxSimulationList) {
            System.out.println("Executing simulation run " + index + "/" + cashBoxSimulationList.size());
            System.out.println("Simulation run: " + cashboxSimulation.getSimulationRunLabel());
            index++;

            File testSetDirectory = new File(OUTPUT_PARENT_DIRECTORY, cashboxSimulation.getSimulationRunLabel());
            testSetDirectory.mkdirs();

            CashBoxParameters cashBoxParameters = new CashBoxParameters();
            cashBoxParameters.setCashBoxId(cashboxSimulation.getCashBoxId());
            cashBoxParameters.setTurnOverCounterAESKey(
                    CryptoUtil.convertBase64KeyToSecretKey(cashboxSimulation.getBase64AesKey()));
            cashBoxParameters.setDepModul(new SimpleMemoryDEPModule());
            cashBoxParameters.setPrinterModule(new SimplePDFPrinterModule());
            cashBoxParameters.setCompanyID(cashboxSimulation.getCompanyID());

            //create pre-defined number of signature devices
            for (int i = 0; i < cashboxSimulation.getNumberOfSignatureDevices(); i++) {
                JWSModule jwsModule = new ManualJWSModule();
                SignatureModule signatureModule;
                if (!CLOSED_SYSTEM) {
                    signatureModule = new NEVER_USE_IN_A_REAL_SYSTEM_SoftwareCertificateOpenSystemSignatureModule(
                            RKSuite.R1_AT100, null);
                } else {
                    signatureModule = new NEVER_USE_IN_A_REAL_SYSTEM_SoftwareKeySignatureModule(
                            cashboxSimulation.getCompanyID() + "-" + "K" + i);
                }
                jwsModule.setOpenSystemSignatureModule(signatureModule);
                cashBoxParameters.getJwsSignatureModules().add(jwsModule);
            }

            //init cashbox
            DemoCashBox demoCashBox = new DemoCashBox(cashBoxParameters);

            //execute simulation run
            demoCashBox.executeSimulation(cashboxSimulation.getCashBoxInstructionList());

            //export DEP: get JSON rep and dump export format to file/std output
            DEPExportFormat depExportFormat = demoCashBox.exportDEP();
            File depExportFile = new File(testSetDirectory, "dep-export.json");
            dumpJSONRepOfObject(depExportFormat, depExportFile, true, "------------DEP-EXPORT-FORMAT------------");

            //store signature certificates and AES key (so that they can be used for verification purposes)
            CryptographicMaterialContainer cryptographicMaterialContainer = new CryptographicMaterialContainer();
            HashMap<String, CertificateOrPublicKeyContainer> certificateContainerMap = new HashMap<>();
            cryptographicMaterialContainer.setCertificateOrPublicKeyMap(certificateContainerMap);

            //store AES key as BASE64 String
            //ATTENTION, this is only for demonstration purposes, the AES key must be stored in a secure location
            cryptographicMaterialContainer.setBase64AESKey(cashboxSimulation.getBase64AesKey());

            List<JWSModule> jwsSignatureModules = demoCashBox.getCashBoxParameters().getJwsSignatureModules();
            for (JWSModule jwsSignatureModule : jwsSignatureModules) {
                CertificateOrPublicKeyContainer certificateOrPublicKeyContainer = new CertificateOrPublicKeyContainer();
                certificateOrPublicKeyContainer.setId(jwsSignatureModule.getSerialNumberOfKeyID());
                certificateContainerMap.put(jwsSignatureModule.getSerialNumberOfKeyID(),
                        certificateOrPublicKeyContainer);
                X509Certificate certificate = (X509Certificate) jwsSignatureModule.getSignatureModule()
                        .getSigningCertificate();
                if (certificate == null) {
                    //must be public key based... (closed system)
                    PublicKey publicKey = jwsSignatureModule.getSignatureModule().getSigningPublicKey();
                    certificateOrPublicKeyContainer.setSignatureCertificateOrPublicKey(
                            CashBoxUtils.base64Encode(publicKey.getEncoded(), false));
                    certificateOrPublicKeyContainer.setSignatureDeviceType(SignatureDeviceType.PUBLIC_KEY);
                } else {
                    certificateOrPublicKeyContainer.setSignatureCertificateOrPublicKey(
                            CashBoxUtils.base64Encode(certificate.getEncoded(), false));
                    certificateOrPublicKeyContainer.setSignatureDeviceType(SignatureDeviceType.CERTIFICATE);
                }
            }
            File cryptographicMaterialContainerFile = new File(testSetDirectory,
                    "cryptographicMaterialContainer.json");
            dumpJSONRepOfObject(cryptographicMaterialContainer, cryptographicMaterialContainerFile, true,
                    "------------CRYPTOGRAPHIC MATERIAL------------");

            //export QR codes to file: dump machine readable code of receipts (this "code" is used for the QR-codes)
            //REF TO SPECIFICATION: Detailspezifikation/Abs 12
            File qrCoreRepExportFile = new File(testSetDirectory, "qr-code-rep.json");
            List<ReceiptPackage> receiptPackages = demoCashBox.getStoredReceipts();
            List<String> qrCodeRepList = new ArrayList<>();
            for (ReceiptPackage receiptPackage : receiptPackages) {
                qrCodeRepList.add(CashBoxUtils.getQRCodeRepresentationFromJWSCompactRepresentation(
                        receiptPackage.getJwsCompactRepresentation()));
            }
            dumpJSONRepOfObject(qrCodeRepList, qrCoreRepExportFile, true, "------------QR-CODE-REP------------");

            //export OCR codes to file: dump machine readable code of receipts (this "code" is used for the OCR-codes)
            //REF TO SPECIFICATION: Detailspezifikation/Abs 14
            File ocrCoreRepExportFile = new File(testSetDirectory, "ocr-code-rep.json");
            List<String> ocrCodeRepList = new ArrayList<>();
            for (ReceiptPackage receiptPackage : receiptPackages) {
                ocrCodeRepList.add(CashBoxUtils.getOCRCodeRepresentationFromJWSCompactRepresentation(
                        receiptPackage.getJwsCompactRepresentation()));
            }
            dumpJSONRepOfObject(ocrCodeRepList, ocrCoreRepExportFile, true, "------------OCR-CODE-REP------------");

            //create PDF receipts (QR code) and print to directory
            //REF TO SPECIFICATION: Detailspezifikation/Abs 12
            File qrCodeDumpDirectory = new File(testSetDirectory, "qr-code-dir-pdf");
            qrCodeDumpDirectory.mkdirs();
            List<byte[]> printedQRCodeReceipts = demoCashBox.printReceipt(receiptPackages, ReceiptPrintType.QR_CODE);
            CashBoxUtils.writeReceiptsToFiles(printedQRCodeReceipts, "QR-", qrCodeDumpDirectory);

            //export receipts as PDF (OCR)
            //REF TO SPECIFICATION: Detailspezifikation/Abs 14
            File ocrCodeDumpDirectory = new File(testSetDirectory, "ocr-code-dir-pdf");
            ocrCodeDumpDirectory.mkdirs();
            List<byte[]> printedOCRCodeReceipts = demoCashBox.printReceipt(receiptPackages, ReceiptPrintType.OCR);
            CashBoxUtils.writeReceiptsToFiles(printedOCRCodeReceipts, "OCR-", ocrCodeDumpDirectory);

            //dump executed testsuite
            File testSuiteDumpFile = new File(testSetDirectory,
                    cashboxSimulation.getSimulationRunLabel() + ".json");
            dumpJSONRepOfObject(cashboxSimulation, testSuiteDumpFile, true,
                    "------------CASHBOX Simulation------------");
        }
    } catch (CertificateEncodingException e) {
        e.printStackTrace();
    } catch (ParseException e) {
        e.printStackTrace();
    }
}
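certificateContainerMap.put above stores each container before its fields are filled in; this works because put stores a reference, not a copy, so later mutations of the container are visible through the map. A minimal sketch (the container type is made up):

import java.util.HashMap;

public class ReferenceSemantics {
    static class Container { String payload; }

    public static void main(String[] args) {
        HashMap<String, Container> map = new HashMap<>();
        Container c = new Container();
        map.put("key-1", c);           // the map stores a reference to c, not a copy

        c.payload = "filled in later"; // mutating c after put is visible through the map
        System.out.println(map.get("key-1").payload); // filled in later
    }
}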
From source file:edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation.java
/**
 * @param args
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the aggregate functions even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option gOption = new Option("g", "group", true,
            "set group of datasets for which the aggregate functions will be computed, "
                    + "followed by their temporal and spatial attribute indices");
    gOption.setRequired(true);
    gOption.setArgName("GROUP");
    gOption.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(gOption);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-id", true,
            "aws secret access key; this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation", options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation", options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println("Arguments 'aws_id', 'aws_key', and 'b' are mandatory if execution is on AWS.");
            formatter.printHelp("hadoop jar data-polygamy.jar "
                    + "edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation", options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    String datasetNames = "";
    String datasetIds = "";
    String preProcessingDatasets = "";
    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> shortDatasetAggregation = new ArrayList<String>();
    HashMap<String, String> datasetTempAtt = new HashMap<String, String>();
    HashMap<String, String> datasetSpatialAtt = new HashMap<String, String>();
    HashMap<String, String> preProcessingDataset = new HashMap<String, String>();
    HashMap<String, String> datasetId = new HashMap<String, String>();

    boolean removeExistingFiles = cmd.hasOption("f");

    String[] datasetArgs = cmd.getOptionValues("g");
    for (int i = 0; i < datasetArgs.length; i += 3) {
        String dataset = datasetArgs[i];

        // getting pre-processing
        String tempPreProcessing = FrameworkUtils.searchPreProcessing(dataset, s3conf, s3);
        if (tempPreProcessing == null) {
            System.out.println("No pre-processing available for " + dataset);
            continue;
        }
        preProcessingDataset.put(dataset, tempPreProcessing);

        shortDataset.add(dataset);
        // string equality, not == reference comparison, for the "null" sentinel
        datasetTempAtt.put(dataset, ("null".equals(datasetArgs[i + 1]) ? null : datasetArgs[i + 1]));
        datasetSpatialAtt.put(dataset, ("null".equals(datasetArgs[i + 2]) ? null : datasetArgs[i + 2]));
        datasetId.put(dataset, null); // placeholder; the id is resolved from the index file below
    }

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    // getting dataset id
    Path path = null;
    FileSystem fs = null;
    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        fs = FileSystem.get(new Configuration());
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();
    if (s3)
        fs.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);

    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    FrameworkUtils.createDir(s3bucket + FrameworkUtils.aggregatesDir, s3conf, s3);

    // getting smallest resolution
    HashMap<String, String> tempResMap = new HashMap<String, String>();
    HashMap<String, String> spatialResMap = new HashMap<String, String>();
    HashMap<String, String> datasetTemporalStrMap = new HashMap<String, String>();
    HashMap<String, String> datasetSpatialStrMap = new HashMap<String, String>();

    HashSet<String> input = new HashSet<String>();

    for (String dataset : shortDataset) {
        String[] datasetArray = preProcessingDataset.get(dataset).split("-");

        String datasetTemporalStr = datasetArray[datasetArray.length - 2];
        int datasetTemporal = utils.temporalResolution(datasetTemporalStr);

        String datasetSpatialStr = datasetArray[datasetArray.length - 1];
        int datasetSpatial = utils.spatialResolution(datasetSpatialStr);

        // finding all possible resolutions
        String[] temporalResolutions = FrameworkUtils.getAggTempResolutions(datasetTemporal);
        String[] spatialResolutions = FrameworkUtils.getAggSpatialResolutions(datasetSpatial);

        String temporalResolution = "";
        String spatialResolution = "";
        String tempRes = "";
        String spatialRes = "";
        boolean dataAdded = false;

        for (int i = 0; i < temporalResolutions.length; i++) {
            for (int j = 0; j < spatialResolutions.length; j++) {
                temporalResolution = temporalResolutions[i];
                spatialResolution = spatialResolutions[j];
                String aggregatesOutputFileName = s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset + "/";
                if (removeExistingFiles) {
                    FrameworkUtils.removeFile(aggregatesOutputFileName, s3conf, s3);
                }
                if (!FrameworkUtils.fileExists(aggregatesOutputFileName, s3conf, s3)) {
                    dataAdded = true;
                    tempRes += temporalResolution + "-";
                    spatialRes += spatialResolution + "-";
                }
            }
        }

        if (dataAdded) {
            input.add(s3bucket + FrameworkUtils.preProcessingDir + "/" + preProcessingDataset.get(dataset));
            shortDatasetAggregation.add(dataset);
            tempResMap.put(dataset, tempRes.substring(0, tempRes.length() - 1));
            spatialResMap.put(dataset, spatialRes.substring(0, spatialRes.length() - 1));
            datasetTemporalStrMap.put(dataset, datasetTemporalStr);
            datasetSpatialStrMap.put(dataset, datasetSpatialStr);
        }
    }

    if (input.isEmpty()) {
        System.out.println("All the input datasets have aggregates.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    it = input.iterator();
    while (it.hasNext()) {
        preProcessingDatasets += it.next() + ",";
    }

    Job aggJob = null;
    String aggregatesOutputDir = s3bucket + FrameworkUtils.aggregatesDir + "/tmp/";
    String jobName = "aggregates";

    FrameworkUtils.removeFile(aggregatesOutputDir, s3conf, s3);

    Configuration aggConf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    aggConf.set("dataset-name", datasetNames);
    aggConf.set("dataset-id", datasetIds);

    for (int i = 0; i < shortDatasetAggregation.size(); i++) {
        String dataset = shortDatasetAggregation.get(i);
        String id = datasetId.get(dataset);
        aggConf.set("dataset-" + id + "-temporal-resolutions", tempResMap.get(dataset));
        aggConf.set("dataset-" + id + "-spatial-resolutions", spatialResMap.get(dataset));
        aggConf.set("dataset-" + id + "-temporal-att", datasetTempAtt.get(dataset));
        aggConf.set("dataset-" + id + "-spatial-att", datasetSpatialAtt.get(dataset));
        aggConf.set("dataset-" + id + "-temporal", datasetTemporalStrMap.get(dataset));
        aggConf.set("dataset-" + id + "-spatial", datasetSpatialStrMap.get(dataset));
        if (s3)
            aggConf.set("dataset-" + id,
                    s3bucket + FrameworkUtils.preProcessingDir + "/" + preProcessingDataset.get(dataset));
        else
            aggConf.set("dataset-" + id, FileSystem.get(new Configuration()).getHomeDirectory() + "/"
                    + FrameworkUtils.preProcessingDir + "/" + preProcessingDataset.get(dataset));
    }

    aggConf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    aggConf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    aggConf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    aggConf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    aggConf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    aggConf.set("mapreduce.task.io.sort.mb", "200");
    aggConf.set("mapreduce.task.io.sort.factor", "100");
    machineConf.setMachineConfiguration(aggConf);

    if (s3) {
        machineConf.setMachineConfiguration(aggConf);
        aggConf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        aggConf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
    }

    if (snappyCompression) {
        aggConf.set("mapreduce.map.output.compress", "true");
        aggConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        aggConf.set("mapreduce.map.output.compress", "true");
        aggConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    aggJob = new Job(aggConf);
    aggJob.setJobName(jobName);

    aggJob.setMapOutputKeyClass(SpatioTemporalWritable.class);
    aggJob.setMapOutputValueClass(AggregationArrayWritable.class);
    aggJob.setOutputKeyClass(SpatioTemporalWritable.class);
    aggJob.setOutputValueClass(FloatArrayWritable.class);

    aggJob.setMapperClass(AggregationMapper.class);
    aggJob.setCombinerClass(AggregationCombiner.class);
    aggJob.setReducerClass(AggregationReducer.class);
    aggJob.setNumReduceTasks(machineConf.getNumberReduces());

    aggJob.setInputFormatClass(SequenceFileInputFormat.class);
    LazyOutputFormat.setOutputFormatClass(aggJob, SequenceFileOutputFormat.class);
    SequenceFileOutputFormat.setCompressOutput(aggJob, true);
    SequenceFileOutputFormat.setOutputCompressionType(aggJob, CompressionType.BLOCK);

    FileInputFormat.setInputDirRecursive(aggJob, true);
    FileInputFormat.setInputPaths(aggJob,
            preProcessingDatasets.substring(0, preProcessingDatasets.length() - 1));
    FileOutputFormat.setOutputPath(aggJob, new Path(aggregatesOutputDir));

    aggJob.setJarByClass(Aggregation.class);

    long start = System.currentTimeMillis();
    aggJob.submit();
    aggJob.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (String dataset : shortDatasetAggregation) {
        String from = s3bucket + FrameworkUtils.aggregatesDir + "/tmp/" + dataset + "/";
        String to = s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset + "/";
        FrameworkUtils.renameFile(from, to, s3conf, s3);
    }
}
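datasetId above is first populated with put(dataset, null) as a placeholder and filled in later while scanning the index file. Because HashMap allows null values, containsKey, not get(key) != null, is the right membership test for this pattern. A minimal sketch with made-up dataset names and ids:

import java.util.HashMap;

public class PlaceholderEntries {
    public static void main(String[] args) {
        HashMap<String, String> datasetId = new HashMap<>();
        datasetId.put("taxi", null); // register the key; the id is resolved later

        // containsKey distinguishes "registered, id still unknown" from "never registered",
        // which get(key) == null alone cannot do, since HashMap permits null values
        System.out.println(datasetId.containsKey("taxi"));  // true
        System.out.println(datasetId.get("taxi") == null);  // true

        datasetId.put("taxi", "311"); // a second put overwrites the placeholder
        System.out.println(datasetId.get("taxi"));          // 311
    }
}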
From source file:hyperheuristics.main.comparisons.ComputeIndicators.java
public static void main(String[] args) throws IOException, InterruptedException {
    int[] numberOfObjectivesArray = new int[] { 2, 4 };

    String[] problems = new String[] { "OO_MyBatis", "OA_AJHsqldb", "OA_AJHotDraw", "OO_BCEL", "OO_JHotDraw",
            "OA_HealthWatcher", // "OA_TollSystems",
            "OO_JBoss" };
    String[] heuristicFunctions = new String[] { LowLevelHeuristic.CHOICE_FUNCTION,
            LowLevelHeuristic.MULTI_ARMED_BANDIT, LowLevelHeuristic.RANDOM };
    String[] algorithms = new String[] { "NSGA-II", // "SPEA2"
    };

    MetricsUtil metricsUtil = new MetricsUtil();
    DecimalFormat decimalFormatter = new DecimalFormat("0.00E0");
    Mean mean = new Mean();
    StandardDeviation standardDeviation = new StandardDeviation();

    InvertedGenerationalDistance igd = new InvertedGenerationalDistance();
    GenerationalDistance gd = new GenerationalDistance();
    Spread spread = new Spread();
    Coverage coverage = new Coverage();

    for (int objectives : numberOfObjectivesArray) {
        try (FileWriter IGDWriter = new FileWriter("experiment/IGD_" + objectives + ".tex");
                FileWriter spreadWriter = new FileWriter("experiment/SPREAD_" + objectives + ".tex");
                FileWriter GDWriter = new FileWriter("experiment/GD_" + objectives + ".tex");
                FileWriter coverageWriter = new FileWriter("experiment/COVERAGE_" + objectives + ".tex")) {

            StringBuilder latexTableBuilder = new StringBuilder();
            latexTableBuilder.append("\\documentclass{paper}\n").append("\n")
                    .append("\\usepackage[T1]{fontenc}\n").append("\\usepackage[latin1]{inputenc}\n")
                    .append("\\usepackage[hidelinks]{hyperref}\n").append("\\usepackage{tabulary}\n")
                    .append("\\usepackage{booktabs}\n").append("\\usepackage{multirow}\n")
                    .append("\\usepackage{amsmath}\n").append("\\usepackage{mathtools}\n")
                    .append("\\usepackage{graphicx}\n").append("\\usepackage{array}\n")
                    .append("\\usepackage[linesnumbered,ruled,inoutnumbered]{algorithm2e}\n")
                    .append("\\usepackage{subfigure}\n").append("\\usepackage[hypcap]{caption}\n")
                    .append("\\usepackage{pdflscape}\n").append("\n").append("\\begin{document}\n").append("\n")
                    .append("\\begin{landscape}\n").append("\n");

            pfKnown: {
                latexTableBuilder.append("\\begin{table}[!htb]\n").append("\t\\centering\n")
                        .append("\t\\def\\arraystretch{1.5}\n")
                        // .append("\t\\setlength{\\tabcolsep}{10pt}\n")
                        // .append("\t\\fontsize{8pt}{10pt}\\selectfont\n")
                        .append("\t\\caption{INDICATOR found for $PF_{known}$ for ").append(objectives)
                        .append(" objectives}\n").append("\t\\label{tab:INDICATOR ").append(objectives)
                        .append(" objectives}\n").append("\t\\begin{tabulary}{\\linewidth}{c");
                for (String algorithm : algorithms) {
                    latexTableBuilder.append("c");
                    for (String heuristicFunction : heuristicFunctions) {
                        latexTableBuilder.append("c");
                    }
                }
                latexTableBuilder.append("}\n").append("\t\t\\toprule\n").append("\t\t\\textbf{System}");
                for (String algorithm : algorithms) {
                    latexTableBuilder.append(" & \\textbf{").append(algorithm).append("}");
                    for (String heuristicFunction : heuristicFunctions) {
                        latexTableBuilder.append(" & \\textbf{").append(algorithm).append("-")
                                .append(heuristicFunction).append("}");
                    }
                }
                latexTableBuilder.append("\\\\\n").append("\t\t\\midrule\n");

                for (String problem : problems) {
                    NonDominatedSolutionList trueFront = new NonDominatedSolutionList();
                    pfTrueComposing: {
                        for (String algorithm : algorithms) {
                            SolutionSet mecbaFront = metricsUtil.readNonDominatedSolutionSet(
                                    "resultado/" + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem
                                            + "_Comb_" + objectives + "obj/All_FUN_"
                                            + algorithm.toLowerCase().replaceAll("-", "") + "-" + problem);
                            trueFront.addAll(mecbaFront);
                            for (String hyperHeuristic : heuristicFunctions) {
                                SolutionSet front = metricsUtil.readNonDominatedSolutionSet("experiment/"
                                        + algorithm + "/" + objectives + "objectives/" + hyperHeuristic + "/"
                                        + problem + "/FUN.txt");
                                trueFront.addAll(front);
                            }
                        }
                    }
                    double[][] trueFrontMatrix = trueFront.writeObjectivesToMatrix();

                    HashMap<String, Double> igdMap = new HashMap<>();
                    HashMap<String, Double> gdMap = new HashMap<>();
                    HashMap<String, Double> spreadMap = new HashMap<>();
                    HashMap<String, Double> coverageMap = new HashMap<>();

                    for (String algorithm : algorithms) {
                        double[][] mecbaFront = metricsUtil.readFront("resultado/"
                                + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem + "_Comb_"
                                + objectives + "obj/All_FUN_" + algorithm.toLowerCase().replaceAll("-", "")
                                + "-" + problem);
                        igdMap.put(algorithm,
                                igd.invertedGenerationalDistance(mecbaFront, trueFrontMatrix, objectives));
                        gdMap.put(algorithm, gd.generationalDistance(mecbaFront, trueFrontMatrix, objectives));
                        spreadMap.put(algorithm, spread.spread(mecbaFront, trueFrontMatrix, objectives));
                        coverageMap.put(algorithm, coverage.coverage(mecbaFront, trueFrontMatrix));

                        for (String heuristic : heuristicFunctions) {
                            double[][] heuristicFront = metricsUtil.readFront("experiment/" + algorithm + "/"
                                    + objectives + "objectives/" + heuristic + "/" + problem + "/FUN.txt");
                            igdMap.put(algorithm + "-" + heuristic, igd
                                    .invertedGenerationalDistance(heuristicFront, trueFrontMatrix, objectives));
                            gdMap.put(algorithm + "-" + heuristic,
                                    gd.generationalDistance(heuristicFront, trueFrontMatrix, objectives));
                            spreadMap.put(algorithm + "-" + heuristic,
                                    spread.spread(heuristicFront, trueFrontMatrix, objectives));
                            coverageMap.put(algorithm + "-" + heuristic,
                                    coverage.coverage(heuristicFront, trueFrontMatrix));
                        }
                    }

                    latexTableBuilder.append("\t\t").append(problem);
                    String latexTable = latexTableBuilder.toString();
                    latexTableBuilder = new StringBuilder();
                    latexTable = latexTable.replaceAll("O[OA]\\_", "").replaceAll("ChoiceFunction", "CF")
                            .replaceAll("MultiArmedBandit", "MAB");
                    IGDWriter.write(latexTable.replaceAll("INDICATOR", "IGD"));
                    spreadWriter.write(latexTable.replaceAll("INDICATOR", "Spread"));
                    GDWriter.write(latexTable.replaceAll("INDICATOR", "GD"));
                    coverageWriter.write(latexTable.replaceAll("INDICATOR", "Coverage"));

                    String bestHeuristicIGD = "NULL";
                    String bestHeuristicGD = "NULL";
                    String bestHeuristicSpread = "NULL";
                    String bestHeuristicCoverage = "NULL";

                    getBest: {
                        double bestMeanIGD = Double.POSITIVE_INFINITY;
                        double bestMeanGD = Double.POSITIVE_INFINITY;
                        double bestMeanSpread = Double.NEGATIVE_INFINITY;
                        double bestMeanCoverage = Double.NEGATIVE_INFINITY;
                        for (String heuristic : igdMap.keySet()) {
                            double heuristicIGD = igdMap.get(heuristic);
                            double heuristicGD = gdMap.get(heuristic);
                            double heuristicSpread = spreadMap.get(heuristic);
                            double heuristicCoverage = coverageMap.get(heuristic);
                            if (heuristicIGD < bestMeanIGD) {
                                bestMeanIGD = heuristicIGD;
                                bestHeuristicIGD = heuristic;
                            }
                            if (heuristicGD < bestMeanGD) {
                                bestMeanGD = heuristicGD;
                                bestHeuristicGD = heuristic;
                            }
                            if (heuristicSpread > bestMeanSpread) {
                                bestMeanSpread = heuristicSpread;
                                bestHeuristicSpread = heuristic;
                            }
                            if (heuristicCoverage > bestMeanCoverage) {
                                bestMeanCoverage = heuristicCoverage;
                                bestHeuristicCoverage = heuristic;
                            }
                        }
                    }

                    StringBuilder igdBuilder = new StringBuilder();
                    StringBuilder gdBuilder = new StringBuilder();
                    StringBuilder spreadBuilder = new StringBuilder();
                    StringBuilder coverageBuilder = new StringBuilder();

                    String[] newHeuristicFunctions = new String[heuristicFunctions.length * algorithms.length
                            + algorithms.length];
                    fulfillNewHeuristics: {
                        int i = 0;
                        for (String algorithm : algorithms) {
                            newHeuristicFunctions[i++] = algorithm;
                            for (String heuristicFunction : heuristicFunctions) {
                                newHeuristicFunctions[i++] = algorithm + "-" + heuristicFunction;
                            }
                        }
                    }

                    for (String heuristic : newHeuristicFunctions) {
                        igdBuilder.append(" & ");
                        boolean bold = heuristic.equals(bestHeuristicIGD)
                                || igdMap.get(heuristic).equals(igdMap.get(bestHeuristicIGD));
                        if (bold) {
                            igdBuilder.append("\\textbf{");
                        }
                        igdBuilder.append(decimalFormatter.format(igdMap.get(heuristic)));
                        if (bold) {
                            igdBuilder.append("}");
                        }

                        gdBuilder.append(" & ");
                        bold = heuristic.equals(bestHeuristicGD)
                                || gdMap.get(heuristic).equals(gdMap.get(bestHeuristicGD));
                        if (bold) {
                            gdBuilder.append("\\textbf{");
                        }
                        gdBuilder.append(decimalFormatter.format(gdMap.get(heuristic)));
                        if (bold) {
                            gdBuilder.append("}");
                        }

                        spreadBuilder.append(" & ");
                        bold = heuristic.equals(bestHeuristicSpread)
                                || spreadMap.get(heuristic).equals(spreadMap.get(bestHeuristicSpread));
                        if (bold) {
                            spreadBuilder.append("\\textbf{");
                        }
                        spreadBuilder.append(decimalFormatter.format(spreadMap.get(heuristic)));
                        if (bold) {
                            spreadBuilder.append("}");
                        }

                        coverageBuilder.append(" & ");
                        bold = heuristic.equals(bestHeuristicCoverage)
                                || coverageMap.get(heuristic).equals(coverageMap.get(bestHeuristicCoverage));
                        if (bold) {
                            coverageBuilder.append("\\textbf{");
                        }
                        coverageBuilder.append(decimalFormatter.format(coverageMap.get(heuristic)));
                        if (bold) {
                            coverageBuilder.append("}");
                        }
                    }

                    IGDWriter.write(igdBuilder + "\\\\\n");
                    spreadWriter.write(spreadBuilder + "\\\\\n");
                    GDWriter.write(gdBuilder + "\\\\\n");
                    coverageWriter.write(coverageBuilder + "\\\\\n");
                }
                latexTableBuilder = new StringBuilder();
                latexTableBuilder.append("\t\t\\bottomrule\n").append("\t\\end{tabulary}\n")
                        .append("\\end{table}\n\n");
            }

            averages: {
                latexTableBuilder.append("\\begin{table}[!htb]\n").append("\t\\centering\n")
                        .append("\t\\def\\arraystretch{1.5}\n")
                        // .append("\t\\setlength{\\tabcolsep}{10pt}\n")
                        // .append("\t\\fontsize{8pt}{10pt}\\selectfont\n")
                        .append("\t\\caption{INDICATOR averages found for ").append(objectives)
                        .append(" objectives}\n").append("\t\\label{tab:INDICATOR ").append(objectives)
                        .append(" objectives}\n").append("\t\\begin{tabulary}{\\linewidth}{c");
                for (String algorithm : algorithms) {
                    latexTableBuilder.append("c");
                    for (String heuristicFunction : heuristicFunctions) {
                        latexTableBuilder.append("c");
                    }
                }
                latexTableBuilder.append("}\n").append("\t\t\\toprule\n").append("\t\t\\textbf{System}");
                for (String algorithm : algorithms) {
                    latexTableBuilder.append(" & \\textbf{").append(algorithm).append("}");
                    for (String heuristicFunction : heuristicFunctions) {
                        latexTableBuilder.append(" & \\textbf{").append(algorithm).append("-")
                                .append(heuristicFunction).append("}");
                    }
                }
                latexTableBuilder.append("\\\\\n").append("\t\t\\midrule\n");

                for (String problem : problems) {
                    NonDominatedSolutionList trueFront = new NonDominatedSolutionList();
                    pfTrueComposing: {
                        for (String algorithm : algorithms) {
                            SolutionSet mecbaFront = metricsUtil.readNonDominatedSolutionSet(
                                    "resultado/" + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem
                                            + "_Comb_" + objectives + "obj/All_FUN_"
                                            + algorithm.toLowerCase().replaceAll("-", "") + "-" + problem);
                            trueFront.addAll(mecbaFront);
                            for (String hyperHeuristic : heuristicFunctions) {
                                SolutionSet front = metricsUtil.readNonDominatedSolutionSet("experiment/"
                                        + algorithm + "/" + objectives + "objectives/" + hyperHeuristic + "/"
                                        + problem + "/FUN.txt");
                                trueFront.addAll(front);
                            }
                        }
                    }
                    double[][] trueFrontMatrix = trueFront.writeObjectivesToMatrix();

                    HashMap<String, double[]> igdMap = new HashMap<>();
                    HashMap<String, double[]> gdMap = new HashMap<>();
                    HashMap<String, double[]> spreadMap = new HashMap<>();
                    HashMap<String, double[]> coverageMap = new HashMap<>();

                    mocaito: {
                        for (String algorithm : algorithms) {
                            double[] mecbaIGDs = new double[EXECUTIONS];
                            double[] mecbaGDs = new double[EXECUTIONS];
                            double[] mecbaSpreads = new double[EXECUTIONS];
                            double[] mecbaCoverages = new double[EXECUTIONS];
                            for (int i = 0; i < EXECUTIONS; i++) {
                                double[][] mecbaFront = metricsUtil.readFront("resultado/"
                                        + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem + "_Comb_"
                                        + objectives + "obj/FUN_" + algorithm.toLowerCase().replaceAll("-", "")
                                        + "-" + problem + "-" + i + ".NaoDominadas");
                                mecbaIGDs[i] = igd.invertedGenerationalDistance(mecbaFront, trueFrontMatrix,
                                        objectives);
                                mecbaGDs[i] = gd.generationalDistance(mecbaFront, trueFrontMatrix, objectives);
                                mecbaSpreads[i] = spread.spread(mecbaFront, trueFrontMatrix, objectives);
                                mecbaCoverages[i] = coverage.coverage(mecbaFront, trueFrontMatrix);
                            }
                            igdMap.put(algorithm, mecbaIGDs);
                            gdMap.put(algorithm, mecbaGDs);
                            spreadMap.put(algorithm, mecbaSpreads);
                            coverageMap.put(algorithm, mecbaCoverages);
                        }
                    }

                    for (String algorithm : algorithms) {
                        for (String heuristic : heuristicFunctions) {
                            double[] hhIGDs = new double[EXECUTIONS];
                            double[] hhGDs = new double[EXECUTIONS];
                            double[] hhSpreads = new double[EXECUTIONS];
                            double[] hhCoverages = new double[EXECUTIONS];
                            for (int i = 0; i < EXECUTIONS; i++) {
                                double[][] hhFront = metricsUtil.readFront("experiment/" + algorithm + "/"
                                        + objectives + "objectives/" + heuristic + "/" + problem + "/EXECUTION_"
                                        + i + "/FUN.txt");
                                hhIGDs[i] = igd.invertedGenerationalDistance(hhFront, trueFrontMatrix,
                                        objectives);
                                hhGDs[i] = gd.generationalDistance(hhFront, trueFrontMatrix, objectives);
                                hhSpreads[i] = spread.spread(hhFront, trueFrontMatrix, objectives);
                                hhCoverages[i] = coverage.coverage(hhFront, trueFrontMatrix);
                            }
                            igdMap.put(algorithm + "-" + heuristic, hhIGDs);
                            gdMap.put(algorithm + "-" + heuristic, hhGDs);
                            spreadMap.put(algorithm + "-" + heuristic, hhSpreads);
                            coverageMap.put(algorithm + "-" + heuristic, hhCoverages);
                        }
                    }

                    HashMap<String, HashMap<String, Boolean>> igdResult = KruskalWallisTest.test(igdMap);
                    HashMap<String, HashMap<String, Boolean>> gdResult = KruskalWallisTest.test(gdMap);
                    HashMap<String, HashMap<String, Boolean>> spreadResult = KruskalWallisTest.test(spreadMap);
                    HashMap<String, HashMap<String, Boolean>> coverageResult = KruskalWallisTest
                            .test(coverageMap);

                    latexTableBuilder.append("\t\t").append(problem);
                    String latexTable = latexTableBuilder.toString();
                    latexTable = latexTable.replaceAll("O[OA]\\_", "").replaceAll("ChoiceFunction", "CF")
                            .replaceAll("MultiArmedBandit", "MAB");
                    IGDWriter.write(latexTable.replaceAll("INDICATOR", "IGD"));
                    spreadWriter.write(latexTable.replaceAll("INDICATOR", "Spread"));
                    GDWriter.write(latexTable.replaceAll("INDICATOR", "GD"));
                    coverageWriter.write(latexTable.replaceAll("INDICATOR", "Coverage"));
                    latexTableBuilder = new StringBuilder();

                    String bestHeuristicIGD = "NULL";
                    String bestHeuristicGD = "NULL";
                    String bestHeuristicSpread = "NULL";
                    String bestHeuristicCoverage = "NULL";

                    getBest: {
                        double bestMeanIGD = Double.POSITIVE_INFINITY;
                        double bestMeanGD = Double.POSITIVE_INFINITY;
                        double bestMeanSpread = Double.NEGATIVE_INFINITY;
                        double bestMeanCoverage = Double.NEGATIVE_INFINITY;
                        for
(String heuristic : igdMap.keySet()) { double heuristicMeanIGD = mean.evaluate(igdMap.get(heuristic)); double heuristicMeanGD = mean.evaluate(gdMap.get(heuristic)); double heuristicMeanSpread = mean.evaluate(spreadMap.get(heuristic)); double heuristicMeanCoverage = mean.evaluate(coverageMap.get(heuristic)); if (heuristicMeanIGD < bestMeanIGD) { bestMeanIGD = heuristicMeanIGD; bestHeuristicIGD = heuristic; } if (heuristicMeanGD < bestMeanGD) { bestMeanGD = heuristicMeanGD; bestHeuristicGD = heuristic; } if (heuristicMeanSpread > bestMeanSpread) { bestMeanSpread = heuristicMeanSpread; bestHeuristicSpread = heuristic; } if (heuristicMeanCoverage > bestMeanCoverage) { bestMeanCoverage = heuristicMeanCoverage; bestHeuristicCoverage = heuristic; } } } StringBuilder igdBuilder = new StringBuilder(); StringBuilder gdBuilder = new StringBuilder(); StringBuilder spreadBuilder = new StringBuilder(); StringBuilder coverageBuilder = new StringBuilder(); String[] newHeuristicFunctions = new String[heuristicFunctions.length * algorithms.length + algorithms.length]; fulfillNewHeuristics: { int i = 0; for (String algorithm : algorithms) { newHeuristicFunctions[i++] = algorithm; for (String heuristicFunction : heuristicFunctions) { newHeuristicFunctions[i++] = algorithm + "-" + heuristicFunction; } } } for (String heuristic : newHeuristicFunctions) { igdBuilder.append(" & "); boolean bold = heuristic.equals(bestHeuristicIGD) || !igdResult.get(heuristic).get(bestHeuristicIGD); if (bold) { igdBuilder.append("\\textbf{"); } igdBuilder.append(decimalFormatter.format(mean.evaluate(igdMap.get(heuristic))) + " (" + decimalFormatter.format(standardDeviation.evaluate(igdMap.get(heuristic))) + ")"); if (bold) { igdBuilder.append("}"); } gdBuilder.append(" & "); bold = heuristic.equals(bestHeuristicGD) || !gdResult.get(heuristic).get(bestHeuristicGD); if (bold) { gdBuilder.append("\\textbf{"); } gdBuilder.append(decimalFormatter.format(mean.evaluate(gdMap.get(heuristic))) + " (" + decimalFormatter.format(standardDeviation.evaluate(gdMap.get(heuristic))) + ")"); if (bold) { gdBuilder.append("}"); } spreadBuilder.append(" & "); bold = heuristic.equals(bestHeuristicSpread) || !spreadResult.get(heuristic).get(bestHeuristicSpread); if (bold) { spreadBuilder.append("\\textbf{"); } spreadBuilder.append(decimalFormatter.format(mean.evaluate(spreadMap.get(heuristic))) + " (" + decimalFormatter.format(standardDeviation.evaluate(spreadMap.get(heuristic))) + ")"); if (bold) { spreadBuilder.append("}"); } coverageBuilder.append(" & "); bold = heuristic.equals(bestHeuristicCoverage) || !coverageResult.get(heuristic).get(bestHeuristicCoverage); if (bold) { coverageBuilder.append("\\textbf{"); } coverageBuilder .append(decimalFormatter.format(mean.evaluate(coverageMap.get(heuristic)))) .append(" (") .append(decimalFormatter .format(standardDeviation.evaluate(coverageMap.get(heuristic)))) .append(")"); if (bold) { coverageBuilder.append("}"); } } IGDWriter.write(igdBuilder + "\\\\\n"); spreadWriter.write(spreadBuilder + "\\\\\n"); GDWriter.write(gdBuilder + "\\\\\n"); coverageWriter.write(coverageBuilder + "\\\\\n"); } latexTableBuilder.append("\t\t\\bottomrule\n").append("\t\\end{tabulary}\n") .append("\\end{table}\n\n"); } latexTableBuilder.append("\\end{landscape}\n\n").append("\\end{document}"); String latexTable = latexTableBuilder.toString(); IGDWriter.write(latexTable); spreadWriter.write(latexTable); GDWriter.write(latexTable); coverageWriter.write(latexTable); } } }
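The HashMap.put idiom worth extracting from this example is the composite key: one flat map per quality indicator, with values stored either under the algorithm name alone or under an "algorithm-heuristic" string, instead of a nested Map of Maps. Below is a minimal, self-contained sketch of that pattern; the class name, the input arrays, and the computeIgd stand-in are hypothetical, with random numbers replacing the real indicator computation.

import java.util.HashMap;
import java.util.Map;

public class IndicatorMapSketch {
    public static void main(String[] args) {
        String[] algorithms = { "NSGA-II", "SPEA2" };
        String[] heuristics = { "ChoiceFunction", "MultiArmedBandit" };

        // One flat map per indicator; composite keys avoid Map<String, Map<String, Double>>.
        Map<String, Double> igdMap = new HashMap<>();
        for (String algorithm : algorithms) {
            igdMap.put(algorithm, computeIgd(algorithm, null)); // baseline run
            for (String heuristic : heuristics) {
                igdMap.put(algorithm + "-" + heuristic, computeIgd(algorithm, heuristic));
            }
        }
        System.out.println(igdMap.keySet()); // e.g. [NSGA-II, NSGA-II-ChoiceFunction, ...]
    }

    // Hypothetical stand-in for the real indicator computation.
    private static double computeIgd(String algorithm, String heuristic) {
        return Math.random();
    }
}

The flat map keeps lookups one-step (igdMap.get(algorithm + "-" + heuristic)), at the cost of encoding structure in the key string; that trade-off is exactly what lets the example above iterate all configurations with a single keySet() loop.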
From source file:com.linkedin.databus.container.netty.HttpRelay.java
public static void main(String[] args) throws Exception {
    Cli cli = new Cli();
    cli.processCommandLineArgs(args);
    cli.parseRelayConfig();

    StaticConfig staticConfig = cli.getRelayConfigBuilder().build();
    HttpRelay relay = new HttpRelay(staticConfig, cli.getPhysicalSourceStaticConfigs());

    RequestProcessorRegistry processorRegistry = relay.getProcessorRegistry();

    // Changes to add schemaId to event; DDSDBUS-3421
    // The long term fix is to remove DatabusEventRandomProducer in favour of RelayEventGenerator
    // The medium term fix is to send SchemaRegistry to DatabusEventRandomProducer, but move
    // RandomProducer to databus-relay-impl (from databus-core-impl)
    // Reason: SchemaHelper classes required to parse/generate schemaId from schemaRegistry
    // requires databus-schemas-core which depends on databus-core-impl
    SchemaRegistryService sr = relay.getSchemaRegistryService();
    HashMap<Long, byte[]> schemaIds = new HashMap<Long, byte[]>(staticConfig.getSourceIds().size());
    for (IdNamePair pair : staticConfig.getSourceIds()) {
        LOG.info("Http Relay Schema Reg:" + pair.getName() + " id=" + pair.getId());
        String schemaStr = sr.fetchLatestSchemaBySourceName(pair.getName());
        if (schemaStr != null) {
            Schema s = Schema.parse(schemaStr);
            byte[] sid = SchemaHelper.getSchemaId(s.toString());
            LOG.info("Found schema! Adding schemaId for sourceName=" + pair.getName() + " id="
                    + pair.getId() + " schemaId=" + sid);
            schemaIds.put(pair.getId(), sid);
        } else {
            byte[] defaultSid = "abcde".getBytes(Charset.defaultCharset());
            LOG.info("Didn't find schema! Adding default schemaId for sourceName=" + pair.getName()
                    + " id=" + pair.getId() + " schemaId=" + defaultSid);
            schemaIds.put(pair.getId(), defaultSid);
        }
    }

    DatabusEventProducer randomEventProducer = new DatabusEventRandomProducer(relay.getEventBuffer(), 10,
            100, 1000, staticConfig.getSourceIds(), schemaIds);

    // specify stats collector for this producer
    ((DatabusEventRandomProducer) randomEventProducer)
            .setStatsCollector(relay.getInboundEventStatisticsCollector());

    processorRegistry.register(EchoRequestProcessor.COMMAND_NAME, new EchoRequestProcessor(null));
    processorRegistry.register(SleepRequestProcessor.COMMAND_NAME, new SleepRequestProcessor(null));
    processorRegistry.register(GenerateDataEventsRequestProcessor.COMMAND_NAME,
            new GenerateDataEventsRequestProcessor(null, relay, randomEventProducer));
    processorRegistry.register(LoadDataEventsRequestProcessor.COMMAND_NAME,
            new LoadDataEventsRequestProcessor(relay.getDefaultExecutorService(), relay));

    LOG.info("source = " + relay.getSourcesIdNameRegistry().getAllSources());

    try {
        relay.registerShutdownHook();
        relay.startAndBlock();
    } catch (Exception e) {
        LOG.error("Error starting the relay", e);
    }
    LOG.info("Exiting relay");
}
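The put calls here implement lookup-with-fallback: every source id ends up with a map entry, holding either the schema id fetched from the registry or a hard-coded default. A minimal sketch of the same pattern follows; the class name, the id list, and lookupSchemaId are hypothetical stand-ins for the schema registry call above.

import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Map;

public class SchemaIdMapSketch {
    public static void main(String[] args) {
        Map<Long, byte[]> schemaIds = new HashMap<>();
        long[] sourceIds = { 1L, 2L, 3L };
        for (long id : sourceIds) {
            byte[] schemaId = lookupSchemaId(id); // may return null
            if (schemaId != null) {
                schemaIds.put(id, schemaId);
            } else {
                // Fall back to a fixed default so every id still gets an entry.
                schemaIds.put(id, "abcde".getBytes(Charset.defaultCharset()));
            }
        }
        System.out.println(schemaIds.size()); // 3: one entry per source id
    }

    // Hypothetical lookup; the real code asks a schema registry and may find nothing.
    private static byte[] lookupSchemaId(long id) {
        return id % 2 == 0 ? new byte[] { (byte) id } : null;
    }
}

Guaranteeing an entry per key means downstream consumers can call get() without null checks, which is why the original populates a default rather than skipping missing schemas.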
From source file:Gen.java
public static void main(String[] args) throws Exception {
    try {
        File[] files = null;
        if (System.getProperty("dir") != null && !System.getProperty("dir").equals("")) {
            files = new File(System.getProperty("dir")).listFiles(new FilenameFilter() {
                public boolean accept(File dir, String name) {
                    return name.toUpperCase().endsWith(".XML");
                }
            });
        } else {
            String fileName = System.getProperty("file") != null && !System.getProperty("file").equals("")
                    ? System.getProperty("file")
                    : "rjmap.xml";
            files = new File[] { new File(fileName) };
        }

        log.info("files : " + Arrays.toString(files));
        if (files == null || files.length == 0) {
            log.info("no files to parse");
            System.exit(0);
        }

        boolean formatsource = true;
        if (System.getProperty("formatsource") != null && !System.getProperty("formatsource").equals("")
                && System.getProperty("formatsource").equalsIgnoreCase("false")) {
            formatsource = false;
        }

        GEN_ROOT = System.getProperty("outputdir");
        if (GEN_ROOT == null || GEN_ROOT.equals("")) {
            GEN_ROOT = new File(files[0].getAbsolutePath()).getParent() + FILE_SEPARATOR + "distrib";
        }
        GEN_ROOT = new File(GEN_ROOT).getAbsolutePath().replace('\\', '/');
        if (GEN_ROOT.endsWith("/"))
            GEN_ROOT = GEN_ROOT.substring(0, GEN_ROOT.length() - 1);
        System.out.println("GEN ROOT:" + GEN_ROOT);

        MAPPING_JAR_NAME = System.getProperty("mappingjar") != null
                && !System.getProperty("mappingjar").equals("") ? System.getProperty("mappingjar")
                        : "mapping.jar";
        if (!MAPPING_JAR_NAME.endsWith(".jar"))
            MAPPING_JAR_NAME += ".jar";

        GEN_ROOT_SRC = GEN_ROOT + FILE_SEPARATOR + "src";
        GEN_ROOT_LIB = GEN_ROOT + FILE_SEPARATOR + "";

        DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance();
        domFactory.setNamespaceAware(true);
        domFactory.setValidating(false);
        DocumentBuilder documentBuilder = domFactory.newDocumentBuilder();

        for (int f = 0; f < files.length; ++f) {
            log.info("parsing file : " + files[f]);
            Document document = documentBuilder.parse(files[f]);

            Vector<Node> initNodes = new Vector<Node>();
            Utils.catchNodes(Utils.catchNode(document.getDocumentElement(), "scripts"), "initScript",
                    initNodes);
            for (int i = 0; i < initNodes.size(); ++i) {
                NamedNodeMap attrs = initNodes.elementAt(i).getAttributes();
                boolean embed = attrs.getNamedItem("embed") != null
                        && attrs.getNamedItem("embed").getNodeValue().equalsIgnoreCase("true");
                StringBuffer vbuffer = new StringBuffer();
                if (attrs.getNamedItem("inline") != null) {
                    vbuffer.append(attrs.getNamedItem("inline").getNodeValue());
                    vbuffer.append('\n');
                } else {
                    String fname = attrs.getNamedItem("name").getNodeValue();
                    if (!fname.startsWith("\\") && !fname.startsWith("/") && fname.toCharArray()[1] != ':') {
                        String path = files[f].getAbsolutePath();
                        path = path.substring(0, path.lastIndexOf(FILE_SEPARATOR));
                        fname = new File(path + FILE_SEPARATOR + fname).getCanonicalPath();
                    }
                    vbuffer.append(Utils.getFileAsStringBuffer(fname));
                }
                initScriptBuffer.append(vbuffer);
                if (embed)
                    embedScriptBuffer.append(vbuffer);
            }

            Vector<Node> packageInitNodes = new Vector<Node>();
            Utils.catchNodes(Utils.catchNode(document.getDocumentElement(), "scripts"), "packageScript",
                    packageInitNodes);
            for (int i = 0; i < packageInitNodes.size(); ++i) {
                NamedNodeMap attrs = packageInitNodes.elementAt(i).getAttributes();
                String packageName = attrs.getNamedItem("package").getNodeValue();
                if (packageName.equals(""))
                    packageName = "rGlobalEnv";
                if (!packageName.endsWith("Function"))
                    packageName += "Function";
                // Lazily create one script buffer per package name.
                if (packageEmbedScriptHashMap.get(packageName) == null) {
                    packageEmbedScriptHashMap.put(packageName, new StringBuffer());
                }
                StringBuffer vbuffer = packageEmbedScriptHashMap.get(packageName);
                // if (!packageName.equals("rGlobalEnvFunction")) {
                //     vbuffer.append("library(" + packageName.substring(0, packageName.lastIndexOf("Function")) + ")\n");
                // }
                if (attrs.getNamedItem("inline") != null) {
                    vbuffer.append(attrs.getNamedItem("inline").getNodeValue() + "\n");
                    initScriptBuffer.append(attrs.getNamedItem("inline").getNodeValue() + "\n");
                } else {
                    String fname = attrs.getNamedItem("name").getNodeValue();
                    if (!fname.startsWith("\\") && !fname.startsWith("/") && fname.toCharArray()[1] != ':') {
                        String path = files[f].getAbsolutePath();
                        path = path.substring(0, path.lastIndexOf(FILE_SEPARATOR));
                        fname = new File(path + FILE_SEPARATOR + fname).getCanonicalPath();
                    }
                    StringBuffer fileBuffer = Utils.getFileAsStringBuffer(fname);
                    vbuffer.append(fileBuffer);
                    initScriptBuffer.append(fileBuffer);
                }
            }

            Vector<Node> functionsNodes = new Vector<Node>();
            Utils.catchNodes(Utils.catchNode(document.getDocumentElement(), "functions"), "function",
                    functionsNodes);
            for (int i = 0; i < functionsNodes.size(); ++i) {
                NamedNodeMap attrs = functionsNodes.elementAt(i).getAttributes();
                String functionName = attrs.getNamedItem("name").getNodeValue();
                boolean forWeb = attrs.getNamedItem("forWeb") != null
                        && attrs.getNamedItem("forWeb").getNodeValue().equalsIgnoreCase("true");
                String signature = (attrs.getNamedItem("signature") == null ? ""
                        : attrs.getNamedItem("signature").getNodeValue() + ",");
                String renameTo = (attrs.getNamedItem("renameTo") == null ? null
                        : attrs.getNamedItem("renameTo").getNodeValue());

                // Nested map: function name -> (signature -> attributes), created on first use.
                HashMap<String, FAttributes> sigMap = Globals._functionsToPublish.get(functionName);
                if (sigMap == null) {
                    sigMap = new HashMap<String, FAttributes>();
                    Globals._functionsToPublish.put(functionName, sigMap);

                    if (attrs.getNamedItem("returnType") == null) {
                        _functionsVector.add(new String[] { functionName });
                    } else {
                        _functionsVector.add(new String[] { functionName,
                                attrs.getNamedItem("returnType").getNodeValue() });
                    }
                }

                sigMap.put(signature, new FAttributes(renameTo, forWeb));

                if (forWeb)
                    _webPublishingEnabled = true;
            }

            if (System.getProperty("targetjdk") != null && !System.getProperty("targetjdk").equals("")
                    && System.getProperty("targetjdk").compareTo("1.5") < 0) {
                if (_webPublishingEnabled || (System.getProperty("ws.r.api") != null
                        && System.getProperty("ws.r.api").equalsIgnoreCase("true"))) {
                    log.info("be careful, web publishing disabled because target JDK<1.5");
                }
                _webPublishingEnabled = false;
            } else {
                if (System.getProperty("ws.r.api") == null || System.getProperty("ws.r.api").equals("")
                        || !System.getProperty("ws.r.api").equalsIgnoreCase("false")) {
                    _webPublishingEnabled = true;
                }
                if (_webPublishingEnabled && System.getProperty("java.version").compareTo("1.5") < 0) {
                    log.info("be careful, web publishing disabled because a JDK<1.5 is in use");
                    _webPublishingEnabled = false;
                }
            }

            Vector<Node> s4Nodes = new Vector<Node>();
            Utils.catchNodes(Utils.catchNode(document.getDocumentElement(), "s4classes"), "class", s4Nodes);
            if (s4Nodes.size() > 0) {
                String formalArgs = "";
                String signature = "";
                for (int i = 0; i < s4Nodes.size(); ++i) {
                    NamedNodeMap attrs = s4Nodes.elementAt(i).getAttributes();
                    String s4Name = attrs.getNamedItem("name").getNodeValue();
                    formalArgs += "p" + i + (i == s4Nodes.size() - 1 ? "" : ",");
                    signature += "'" + s4Name + "'" + (i == s4Nodes.size() - 1 ? "" : ",");
                }
                String genBeansScriptlet = "setGeneric('" + PUBLISH_S4_HEADER + "', function(" + formalArgs
                        + ") standardGeneric('" + PUBLISH_S4_HEADER + "'));" + "setMethod('"
                        + PUBLISH_S4_HEADER + "', signature(" + signature + ") , function(" + formalArgs
                        + ") { })";
                initScriptBuffer.append(genBeansScriptlet);
                _functionsVector.add(new String[] { PUBLISH_S4_HEADER, "numeric" });
            }
        }

        if (!new File(GEN_ROOT_LIB).exists())
            regenerateDir(GEN_ROOT_LIB);
        else {
            clean(GEN_ROOT_LIB, true);
        }

        for (int i = 0; i < rwebservicesScripts.length; ++i)
            DirectJNI.getInstance().getRServices().sourceFromResource(rwebservicesScripts[i]);

        String lastStatus = DirectJNI.getInstance().runR(new ExecutionUnit() {
            public void run(Rengine e) {
                DirectJNI.getInstance().toggleMarker();
                DirectJNI.getInstance().sourceFromBuffer(initScriptBuffer.toString());
                log.info(" init script status : " + DirectJNI.getInstance().cutStatusSinceMarker());

                for (int i = 0; i < _functionsVector.size(); ++i) {
                    String[] functionPair = _functionsVector.elementAt(i);
                    log.info("dealing with : " + functionPair[0]);

                    regenerateDir(GEN_ROOT_SRC);

                    String createMapStr = "createMap(";
                    boolean isGeneric = e.rniGetBoolArrayI(
                            e.rniEval(e.rniParse("isGeneric(\"" + functionPair[0] + "\")", 1), 0))[0] == 1;
                    log.info("is Generic : " + isGeneric);
                    if (isGeneric) {
                        createMapStr += functionPair[0];
                    } else {
                        createMapStr += "\"" + functionPair[0] + "\"";
                    }
                    createMapStr += ", outputDirectory=\"" + GEN_ROOT_SRC
                            .substring(0, GEN_ROOT_SRC.length() - "/src".length()).replace('\\', '/') + "\"";
                    createMapStr += ", typeMode=\"robject\"";
                    createMapStr += (functionPair.length == 1 || functionPair[1] == null
                            || functionPair[1].trim().equals("") ? ""
                                    : ", S4DefaultTypedSig=TypedSignature(returnType=\"" + functionPair[1]
                                            + "\")");
                    createMapStr += ")";

                    log.info("------------------------------------------");
                    log.info("-- createMapStr=" + createMapStr);
                    DirectJNI.getInstance().toggleMarker();
                    e.rniEval(e.rniParse(createMapStr, 1), 0);
                    String createMapStatus = DirectJNI.getInstance().cutStatusSinceMarker();
                    log.info(" createMap status : " + createMapStatus);
                    log.info("------------------------------------------");

                    deleteDir(GEN_ROOT_SRC + "/org/kchine/r/rserviceJms");
                    compile(GEN_ROOT_SRC);
                    jar(GEN_ROOT_SRC, GEN_ROOT_LIB + FILE_SEPARATOR + TEMP_JARS_PREFIX + i + ".jar", null);

                    URL url = null;
                    try {
                        url = new URL("jar:file:"
                                + (GEN_ROOT_LIB + FILE_SEPARATOR + TEMP_JARS_PREFIX + i + ".jar")
                                        .replace('\\', '/')
                                + "!/");
                    } catch (Exception ex) {
                        ex.printStackTrace();
                    }
                    DirectJNI.generateMaps(url, true);
                }
            }
        });

        log.info(lastStatus);
        log.info(DirectJNI._rPackageInterfacesHash);

        regenerateDir(GEN_ROOT_SRC);
        for (int i = 0; i < _functionsVector.size(); ++i) {
            unjar(GEN_ROOT_LIB + FILE_SEPARATOR + TEMP_JARS_PREFIX + i + ".jar", GEN_ROOT_SRC);
        }

        regenerateRPackageClass(true);
        generateS4BeanRef();

        if (formatsource)
            applyJalopy(GEN_ROOT_SRC);

        compile(GEN_ROOT_SRC);

        for (String k : DirectJNI._rPackageInterfacesHash.keySet()) {
            Rmic rmicTask = new Rmic();
            rmicTask.setProject(_project);
            rmicTask.setTaskName("rmic_packages");
            rmicTask.setClasspath(new Path(_project, GEN_ROOT_SRC));
            rmicTask.setBase(new File(GEN_ROOT_SRC));
            rmicTask.setClassname(k + "ImplRemote");
            rmicTask.init();
            rmicTask.execute();
        }

        // DirectJNI._rPackageInterfacesHash = new HashMap<String, Vector<Class<?>>>();
        // DirectJNI._rPackageInterfacesHash.put("org.bioconductor.packages.rGlobalEnv.rGlobalEnvFunction",
        //         new Vector<Class<?>>());

        if (_webPublishingEnabled) {
            jar(GEN_ROOT_SRC, GEN_ROOT_LIB + FILE_SEPARATOR + "__temp.jar", null);
            URL url = new URL(
                    "jar:file:" + (GEN_ROOT_LIB + FILE_SEPARATOR + "__temp.jar").replace('\\', '/') + "!/");
            ClassLoader cl = new URLClassLoader(new URL[] { url }, Globals.class.getClassLoader());

            for (String className : DirectJNI._rPackageInterfacesHash.keySet()) {
                if (cl.loadClass(className + "Web").getDeclaredMethods().length == 0)
                    continue;

                log.info("######## " + className);

                WsGen wsgenTask = new WsGen();
                wsgenTask.setProject(_project);
                wsgenTask.setTaskName("wsgen");

                FileSet rjb_fileSet = new FileSet();
                rjb_fileSet.setProject(_project);
                rjb_fileSet.setDir(new File("."));
                rjb_fileSet.setIncludes("RJB.jar");

                DirSet src_dirSet = new DirSet();
                src_dirSet.setDir(new File(GEN_ROOT_LIB + FILE_SEPARATOR + "src/"));

                Path classPath = new Path(_project);
                classPath.addFileset(rjb_fileSet);
                classPath.addDirset(src_dirSet);

                wsgenTask.setClasspath(classPath);
                wsgenTask.setKeep(true);
                wsgenTask.setDestdir(new File(GEN_ROOT_LIB + FILE_SEPARATOR + "src/"));
                wsgenTask.setResourcedestdir(new File(GEN_ROOT_LIB + FILE_SEPARATOR + "src/"));
                wsgenTask.setSei(className + "Web");
                wsgenTask.init();
                wsgenTask.execute();
            }

            new File(GEN_ROOT_LIB + FILE_SEPARATOR + "__temp.jar").delete();
        }

        embedRScripts();

        HashMap<String, String> marker = new HashMap<String, String>();
        marker.put("RJBMAPPINGJAR", "TRUE");

        Properties props = new Properties();
        props.put("PACKAGE_NAMES", PoolUtils.objectToHex(DirectJNI._packageNames));
        props.put("S4BEANS_MAP", PoolUtils.objectToHex(DirectJNI._s4BeansMapping));
        props.put("S4BEANS_REVERT_MAP", PoolUtils.objectToHex(DirectJNI._s4BeansMappingRevert));
        props.put("FACTORIES_MAPPING", PoolUtils.objectToHex(DirectJNI._factoriesMapping));
        props.put("S4BEANS_HASH", PoolUtils.objectToHex(DirectJNI._s4BeansHash));
        props.put("R_PACKAGE_INTERFACES_HASH", PoolUtils.objectToHex(DirectJNI._rPackageInterfacesHash));
        props.put("ABSTRACT_FACTORIES", PoolUtils.objectToHex(DirectJNI._abstractFactories));

        new File(GEN_ROOT_SRC + "/" + "maps").mkdirs();
        FileOutputStream fos = new FileOutputStream(GEN_ROOT_SRC + "/" + "maps/rjbmaps.xml");
        props.storeToXML(fos, null);
        fos.close();

        jar(GEN_ROOT_SRC, GEN_ROOT_LIB + FILE_SEPARATOR + MAPPING_JAR_NAME, marker);

        if (_webPublishingEnabled)
            genWeb();

        DirectJNI._mappingClassLoader = null;
    } finally {
        System.exit(0);
    }
}