List of usage examples for java.lang.StringBuilder.append
Method signature under discussion: @Override public StringBuilder append(double d)
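Before the project-sized examples below, a minimal, self-contained sketch of the append(double) overload named above; the values are illustrative only.

public class AppendDoubleDemo {
    public static void main(String[] args) {
        StringBuilder sb = new StringBuilder();
        // append(double) converts the value with Double.toString and appends the result
        sb.append("pi = ").append(3.14159).append(", ratio = ").append(2.0 / 3.0);
        System.out.println(sb); // pi = 3.14159, ratio = 0.6666666666666666
    }
}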
From source file: de.tudarmstadt.ukp.experiments.dip.wp1.documents.Step8GoldDataAggregator.java
public static void main(String[] args) throws Exception { String inputDir = args[0] + "/"; // output dir File outputDir = new File(args[1]); File turkersConfidence = new File(args[2]); if (outputDir.exists()) { outputDir.delete();/*from w w w. j a v a 2s.c o m*/ } outputDir.mkdir(); List<String> annotatorsIDs = new ArrayList<>(); // for (File f : FileUtils.listFiles(new File(inputDir), new String[] { "xml" }, false)) { // QueryResultContainer queryResultContainer = QueryResultContainer // .fromXML(FileUtils.readFileToString(f, "utf-8")); // for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) { // for (QueryResultContainer.MTurkRelevanceVote relevanceVote : rankedResults.mTurkRelevanceVotes) { // if (!annotatorsIDs.contains(relevanceVote.turkID)) // annotatorsIDs.add(relevanceVote.turkID); // } // } // } HashMap<String, Integer> countVotesForATurker = new HashMap<>(); // creates temporary file with format for mace // Hashmap annotations: key is the id of a document and a sentence // Value is an array votes[] of turkers decisions: true or false (relevant or not) // the length of this array equals the number of annotators in List<String> annotatorsIDs. // If an annotator worked on the task his decision is written in the array otherwise the value is NULL // key: queryID + clueWebID + sentenceID // value: true and false annotations TreeMap<String, Annotations> annotations = new TreeMap<>(); for (File f : FileUtils.listFiles(new File(inputDir), new String[] { "xml" }, false)) { QueryResultContainer queryResultContainer = QueryResultContainer .fromXML(FileUtils.readFileToString(f, "utf-8")); System.out.println("Reading " + f.getName()); for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) { String documentID = rankedResults.clueWebID; for (QueryResultContainer.MTurkRelevanceVote relevanceVote : rankedResults.mTurkRelevanceVotes) { Integer turkerID; if (!annotatorsIDs.contains(relevanceVote.turkID)) { annotatorsIDs.add(relevanceVote.turkID); turkerID = annotatorsIDs.size() - 1; } else { turkerID = annotatorsIDs.indexOf(relevanceVote.turkID); } Integer count = countVotesForATurker.get(relevanceVote.turkID); if (count == null) { count = 0; } count++; countVotesForATurker.put(relevanceVote.turkID, count); String id; List<Integer> trueVotes; List<Integer> falseVotes; for (QueryResultContainer.SingleSentenceRelevanceVote singleSentenceRelevanceVote : relevanceVote.singleSentenceRelevanceVotes) if (!"".equals(singleSentenceRelevanceVote.sentenceID)) { id = f.getName() + "_" + documentID + "_" + singleSentenceRelevanceVote.sentenceID; Annotations turkerVotes = annotations.get(id); if (turkerVotes == null) { trueVotes = new ArrayList<>(); falseVotes = new ArrayList<>(); turkerVotes = new Annotations(trueVotes, falseVotes); } trueVotes = turkerVotes.trueAnnotations; falseVotes = turkerVotes.falseAnnotations; if ("true".equals(singleSentenceRelevanceVote.relevant)) { // votes[turkerID] = true; trueVotes.add(turkerID); } else if ("false".equals(singleSentenceRelevanceVote.relevant)) { // votes[turkerID] = false; falseVotes.add(turkerID); } else { throw new IllegalStateException("Annotation value of sentence " + singleSentenceRelevanceVote.sentenceID + " in " + rankedResults.clueWebID + " equals " + singleSentenceRelevanceVote.relevant); } try { int allVotesCount = trueVotes.size() + falseVotes.size(); if (allVotesCount > 5) { System.err.println(id + " doesn't have 5 annotators: true: " + trueVotes.size() + " false: " + 
falseVotes.size()); // nasty hack, we're gonna strip some data; true votes first /* we can't do that, it breaks something down the line int toRemove = allVotesCount - 5; if (trueVotes.size() >= toRemove) { trueVotes = trueVotes .subList(0, trueVotes.size() - toRemove); } else if ( falseVotes.size() >= toRemove) { falseVotes = falseVotes .subList(0, trueVotes.size() - toRemove); } */ System.err.println("Adjusted: " + id + " doesn't have 5 annotators: true: " + trueVotes.size() + " false: " + falseVotes.size()); } } catch (IllegalStateException e) { e.printStackTrace(); } turkerVotes.trueAnnotations = trueVotes; turkerVotes.falseAnnotations = falseVotes; annotations.put(id, turkerVotes); } else { throw new IllegalStateException( "Empty Sentence ID in " + f.getName() + " for turker " + turkerID); } } } } File tmp = printHashMap(annotations, annotatorsIDs.size()); String file = TEMP_DIR + "/" + tmp.getName(); MACE.main(new String[] { "--prefix", file }); //gets the keys of the documents and sentences ArrayList<String> lines = (ArrayList<String>) FileUtils.readLines(new File(file + ".prediction")); int i = 0; TreeMap<String, TreeMap<String, ArrayList<HashMap<String, String>>>> ids = new TreeMap<>(); ArrayList<HashMap<String, String>> sentences; if (lines.size() != annotations.size()) { throw new IllegalStateException( "The size of prediction file is " + lines.size() + "but expected " + annotations.size()); } for (Map.Entry entry : annotations.entrySet()) { //1001.xml_clueweb12-1905wb-13-07360_8783 String key = (String) entry.getKey(); String[] IDs = key.split("_"); if (IDs.length > 2) { String queryID = IDs[0]; String clueWebID = IDs[1]; String sentenceID = IDs[2]; TreeMap<String, ArrayList<HashMap<String, String>>> clueWebIDs = ids.get(queryID); if (clueWebIDs == null) { clueWebIDs = new TreeMap<>(); } sentences = clueWebIDs.get(clueWebID); if (sentences == null) { sentences = new ArrayList<>(); } HashMap<String, String> sentence = new HashMap<>(); sentence.put(sentenceID, lines.get(i)); sentences.add(sentence); clueWebIDs.put(clueWebID, sentences); ids.put(queryID, clueWebIDs); } else { throw new IllegalStateException("Wrong ID " + key); } i++; } for (Map.Entry entry : ids.entrySet()) { TreeMap<Integer, String> value = (TreeMap<Integer, String>) entry.getValue(); String queryID = (String) entry.getKey(); QueryResultContainer queryResultContainer = QueryResultContainer .fromXML(FileUtils.readFileToString(new File(inputDir, queryID), "utf-8")); for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) { for (Map.Entry val : value.entrySet()) { String clueWebID = (String) val.getKey(); if (clueWebID.equals(rankedResults.clueWebID)) { List<QueryResultContainer.SingleSentenceRelevanceVote> goldEstimatedLabels = new ArrayList<>(); List<QueryResultContainer.SingleSentenceRelevanceVote> turkersVotes = new ArrayList<>(); int size = 0; int hitSize = 0; String hitID = ""; for (QueryResultContainer.MTurkRelevanceVote vote : rankedResults.mTurkRelevanceVotes) { if (!hitID.equals(vote.hitID)) { hitID = vote.hitID; hitSize = vote.singleSentenceRelevanceVotes.size(); size = size + hitSize; turkersVotes.addAll(vote.singleSentenceRelevanceVotes); } else { if (vote.singleSentenceRelevanceVotes.size() != hitSize) { hitSize = vote.singleSentenceRelevanceVotes.size(); size = size + hitSize; turkersVotes.addAll(vote.singleSentenceRelevanceVotes); } } } ArrayList<HashMap<String, String>> sentenceList = (ArrayList<HashMap<String, String>>) val .getValue(); if 
(sentenceList.size() != turkersVotes.size()) { try { throw new IllegalStateException("Expected size of annotations is " + turkersVotes.size() + "but found " + sentenceList.size() + " for document " + rankedResults.clueWebID + " in " + queryID); } catch (IllegalStateException ex) { ex.printStackTrace(); } } for (QueryResultContainer.SingleSentenceRelevanceVote s : turkersVotes) { String valSentence = null; for (HashMap<String, String> anno : sentenceList) { if (anno.keySet().contains(s.sentenceID)) { valSentence = anno.get(s.sentenceID); } } QueryResultContainer.SingleSentenceRelevanceVote singleSentenceVote = new QueryResultContainer.SingleSentenceRelevanceVote(); singleSentenceVote.sentenceID = s.sentenceID; if (("false").equals(valSentence)) { singleSentenceVote.relevant = "false"; } else if (("true").equals(valSentence)) { singleSentenceVote.relevant = "true"; } else { throw new IllegalStateException("Annotation value of sentence " + singleSentenceVote.sentenceID + " equals " + val.getValue()); } goldEstimatedLabels.add(singleSentenceVote); } rankedResults.goldEstimatedLabels = goldEstimatedLabels; } } } File outputFile = new File(outputDir, queryID); FileUtils.writeStringToFile(outputFile, queryResultContainer.toXML(), "utf-8"); System.out.println("Finished " + outputFile); } ArrayList<String> annotators = (ArrayList<String>) FileUtils.readLines(new File(file + ".competence")); FileWriter fileWriter; StringBuilder sb = new StringBuilder(); for (int j = 0; j < annotatorsIDs.size(); j++) { String[] s = annotators.get(0).split("\t"); Float score = Float.parseFloat(s[j]); String turkerID = annotatorsIDs.get(j); System.out.println(turkerID + " " + score + " " + countVotesForATurker.get(turkerID)); sb.append(turkerID).append(" ").append(score).append(" ").append(countVotesForATurker.get(turkerID)) .append("\n"); } fileWriter = new FileWriter(turkersConfidence); fileWriter.append(sb.toString()); fileWriter.close(); }
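The StringBuilder work at the end of this example is a per-annotator report: each line chains the turker ID, a competence score, and a vote count, and the whole buffer is flushed to a FileWriter in one call. A reduced sketch of just that pattern, with hypothetical data standing in for the MACE competence file:

import java.io.FileWriter;
import java.io.IOException;

public class ConfidenceReport {
    public static void main(String[] args) throws IOException {
        String[] turkerIds = { "A1", "A2", "A3" };   // hypothetical annotator IDs
        float[] scores = { 0.91f, 0.72f, 0.55f };    // hypothetical competence scores
        int[] voteCounts = { 120, 98, 45 };          // hypothetical vote counts

        StringBuilder sb = new StringBuilder();
        for (int j = 0; j < turkerIds.length; j++) {
            // one line per annotator: id, score, number of votes
            sb.append(turkerIds[j]).append(" ").append(scores[j]).append(" ").append(voteCounts[j]).append("\n");
        }
        try (FileWriter writer = new FileWriter("turkersConfidence.txt")) {
            writer.append(sb.toString());            // single write of the accumulated report
        }
    }
}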
From source file: net.modelbased.proasense.storage.registry.StorageRegistryScrapRateTestClient.java
public static void main(String[] args) { // Get client properties from properties file // StorageRegistryScrapRateTestClient client = new StorageRegistryScrapRateTestClient(); // client.loadClientProperties(); // Hardcoded client properties (simple test client) String STORAGE_REGISTRY_SERVICE_URL = "http://192.168.84.34:8080/storage-registry"; // Default HTTP client and common properties for requests HttpClient client = new DefaultHttpClient(); StringBuilder requestUrl = null; List<NameValuePair> params = null; String queryString = null;/*from w ww. ja v a2 s. c o m*/ // Default HTTP response and common properties for responses HttpResponse response = null; ResponseHandler<String> handler = null; int status = 0; String body = null; // Query for machine list requestUrl = new StringBuilder(STORAGE_REGISTRY_SERVICE_URL); requestUrl.append("/query/machine/list"); try { HttpGet query11 = new HttpGet(requestUrl.toString()); query11.setHeader("Content-type", "application/json"); response = client.execute(query11); // Check status code status = response.getStatusLine().getStatusCode(); if (status != 200) { throw new RuntimeException("Failed! HTTP error code: " + status); } // Get body handler = new BasicResponseHandler(); body = handler.handleResponse(response); System.out.println("MACHINE LIST: " + body); } catch (Exception e) { System.out.println(e.getClass().getName() + ": " + e.getMessage()); } // Query for machine properties requestUrl = new StringBuilder(STORAGE_REGISTRY_SERVICE_URL); requestUrl.append("/query/machine/list"); params = new LinkedList<NameValuePair>(); params.add(new BasicNameValuePair("machineId", "IMM1")); queryString = URLEncodedUtils.format(params, "utf-8"); requestUrl.append("?"); requestUrl.append(queryString); try { HttpGet query12 = new HttpGet(requestUrl.toString()); query12.setHeader("Content-type", "application/json"); response = client.execute(query12); // Check status code status = response.getStatusLine().getStatusCode(); if (status != 200) { throw new RuntimeException("Failed! HTTP error code: " + status); } // Get body handler = new BasicResponseHandler(); body = handler.handleResponse(response); System.out.println("MACHINE PROPERTIES: " + body); } catch (Exception e) { System.out.println(e.getClass().getName() + ": " + e.getMessage()); } // Query for sensor list requestUrl = new StringBuilder(STORAGE_REGISTRY_SERVICE_URL); requestUrl.append("/query/sensor/list"); try { HttpGet query21 = new HttpGet(requestUrl.toString()); query21.setHeader("Content-type", "application/json"); response = client.execute(query21); // Check status code status = response.getStatusLine().getStatusCode(); if (status != 200) { throw new RuntimeException("Failed! 
HTTP error code: " + status); } // Get body handler = new BasicResponseHandler(); body = handler.handleResponse(response); System.out.println("SENSOR LIST: " + body); } catch (Exception e) { System.out.println(e.getClass().getName() + ": " + e.getMessage()); } // Query for sensor properties requestUrl = new StringBuilder(STORAGE_REGISTRY_SERVICE_URL); requestUrl.append("/query/sensor/properties"); params = new LinkedList<NameValuePair>(); params.add(new BasicNameValuePair("sensorId", "dustParticleSensor")); queryString = URLEncodedUtils.format(params, "utf-8"); requestUrl.append("?"); requestUrl.append(queryString); try { HttpGet query22 = new HttpGet(requestUrl.toString()); query22.setHeader("Content-type", "application/json"); response = client.execute(query22); // Check status code status = response.getStatusLine().getStatusCode(); if (status != 200) { throw new RuntimeException("Failed! HTTP error code: " + status); } // Get body handler = new BasicResponseHandler(); body = handler.handleResponse(response); System.out.println("SENSOR PROPERTIES: " + body); } catch (Exception e) { System.out.println(e.getClass().getName() + ": " + e.getMessage()); } // Query for product list requestUrl = new StringBuilder(STORAGE_REGISTRY_SERVICE_URL); requestUrl.append("/query/product/list"); try { HttpGet query31 = new HttpGet(requestUrl.toString()); query31.setHeader("Content-type", "application/json"); response = client.execute(query31); // Check status code status = response.getStatusLine().getStatusCode(); if (status != 200) { throw new RuntimeException("Failed! HTTP error code: " + status); } // Get body handler = new BasicResponseHandler(); body = handler.handleResponse(response); System.out.println("PRODUCT LIST: " + body); } catch (Exception e) { System.out.println(e.getClass().getName() + ": " + e.getMessage()); } // Query for product properties requestUrl = new StringBuilder(STORAGE_REGISTRY_SERVICE_URL); requestUrl.append("/query/product/properties"); params = new LinkedList<NameValuePair>(); params.add(new BasicNameValuePair("productId", "Astra_3300")); queryString = URLEncodedUtils.format(params, "utf-8"); requestUrl.append("?"); requestUrl.append(queryString); try { HttpGet query22 = new HttpGet(requestUrl.toString()); query22.setHeader("Content-type", "application/json"); response = client.execute(query22); // Check status code status = response.getStatusLine().getStatusCode(); if (status != 200) { throw new RuntimeException("Failed! HTTP error code: " + status); } // Get body handler = new BasicResponseHandler(); body = handler.handleResponse(response); System.out.println("PRODUCT PROPERTIES: " + body); } catch (Exception e) { System.out.println(e.getClass().getName() + ": " + e.getMessage()); } // Query for mould list requestUrl = new StringBuilder(STORAGE_REGISTRY_SERVICE_URL); requestUrl.append("/query/mould/list"); try { HttpGet query41 = new HttpGet(requestUrl.toString()); query41.setHeader("Content-type", "application/json"); response = client.execute(query41); // Check status code status = response.getStatusLine().getStatusCode(); if (status != 200) { throw new RuntimeException("Failed! 
HTTP error code: " + status); } // Get body handler = new BasicResponseHandler(); body = handler.handleResponse(response); System.out.println("MOULD LIST: " + body); } catch (Exception e) { System.out.println(e.getClass().getName() + ": " + e.getMessage()); } // Query for mould properties requestUrl = new StringBuilder(STORAGE_REGISTRY_SERVICE_URL); requestUrl.append("/query/product/properties"); params = new LinkedList<NameValuePair>(); params.add(new BasicNameValuePair("productId", "Astra_3300")); queryString = URLEncodedUtils.format(params, "utf-8"); requestUrl.append("?"); requestUrl.append(queryString); try { HttpGet query42 = new HttpGet(requestUrl.toString()); query42.setHeader("Content-type", "application/json"); response = client.execute(query42); // Check status code status = response.getStatusLine().getStatusCode(); if (status != 200) { throw new RuntimeException("Failed! HTTP error code: " + status); } // Get body handler = new BasicResponseHandler(); body = handler.handleResponse(response); System.out.println("MOULD PROPERTIES: " + body); } catch (Exception e) { System.out.println(e.getClass().getName() + ": " + e.getMessage()); } }
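Every query above builds its URL the same way: a StringBuilder seeded with the service base URL, a path appended, then "?" and an encoded query string. A minimal sketch of that URL-building step alone; the host and machineId are placeholders, and the JDK's URLEncoder stands in for Apache's URLEncodedUtils:

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public class RequestUrlBuilder {
    public static void main(String[] args) throws UnsupportedEncodingException {
        String baseUrl = "http://localhost:8080/storage-registry";   // placeholder service URL

        StringBuilder requestUrl = new StringBuilder(baseUrl);
        requestUrl.append("/query/machine/properties");
        requestUrl.append("?");
        requestUrl.append("machineId").append("=").append(URLEncoder.encode("IMM1", "utf-8"));

        System.out.println(requestUrl.toString());
        // http://localhost:8080/storage-registry/query/machine/properties?machineId=IMM1
    }
}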
From source file: com.act.biointerpretation.metadata.ProteinMetadataFactory.java
public static void main(String[] args) throws Exception { // TODO: This is referencing a temporary collection. Change it! // TODO: FIX THIS BEFORE MERGE! NoSQLAPI api = new NoSQLAPI("actv01_vijay_proteins", "actv01_vijay_proteins"); Iterator<Reaction> iterator = api.readRxnsFromInKnowledgeGraph(); //Create a single instance of the factory method to use for all json ProteinMetadataFactory factory = ProteinMetadataFactory.initiate(); //Run some tests try {/* w w w. j ava2s. c o m*/ if (factory.testHandlesubunits() == true) { System.out.println("Subunit test OK"); } } catch (Exception err) { System.err.println("Failed to test subunits"); } //Create a list to aggregate the results of the database scan List<ProteinMetadata> agg = new ArrayList<>(); //Scan the database and store ProteinMetadata objects while (iterator.hasNext()) { Reaction rxn = iterator.next(); Reaction.RxnDataSource source = rxn.getDataSource(); if (!source.equals(Reaction.RxnDataSource.BRENDA)) { continue; } Set<JSONObject> jsons = rxn.getProteinData(); for (JSONObject json : jsons) { ProteinMetadata meta = factory.create(json); agg.add(meta); } } //Write out any messages to file StringBuilder sb = new StringBuilder(); for (String aline : factory.dataList) { sb.append(aline).append("\n"); } File outfile = new File("output/ProteinMetadata/Factory_output.txt"); if (outfile.exists()) { outfile.delete(); } FileUtils.writeStringToFile(outfile, sb.toString()); sb = new StringBuilder(); for (String key : factory.dataMap.keySet()) { int value = factory.dataMap.get(key); sb.append(key + "\t" + value + "\n"); } outfile = new File("output/ProteinMetadata/Factory_output_map.txt"); if (outfile.exists()) { outfile.delete(); } FileUtils.writeStringToFile(outfile, sb.toString()); //Count up the results of modifications to get statistics int falsecount = 0; int truecount = 0; int nullcount = 0; for (ProteinMetadata datum : agg) { if (datum == null) { System.err.println("null datum"); continue; } if (datum.modifications == null) { nullcount++; } else if (datum.modifications == false) { falsecount++; } else if (datum.modifications == true) { truecount++; } } System.out.println("Total # protein metadata: " + agg.size()); System.out.println(); System.out.println("modification true count: " + truecount); System.out.println("modification false count: " + falsecount); System.out.println("modification null count: " + nullcount); System.out.println(); //Get some statistics for cloned nullcount = 0; int emptycount = 0; int colicount = 0; int humancount = 0; int bothcount = 0; for (ProteinMetadata datum : agg) { if (datum == null) { System.err.println("null datum"); continue; } if (datum.cloned == null) { nullcount++; continue; } if (datum.cloned.isEmpty()) { emptycount++; continue; } Integer human = datum.cloned.get(Host.Hsapiens); if (human != null && human > 0) { humancount++; } Integer coli = datum.cloned.get(Host.Ecoli); if (coli != null && coli > 0) { colicount++; if (human != null && human > 0) { bothcount++; } } } System.out.println("cloned null count: " + nullcount); System.out.println("cloned empty count: " + emptycount); System.out.println("cloned coli count: " + colicount); System.out.println("cloned human count: " + humancount); System.out.println("cloned both count: " + bothcount); System.out.println(); }
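The appends here just accumulate report lines (one message per line, then key/value pairs separated by a tab) before a single write to disk. A sketch of that pattern, assuming Java 11+ and using java.nio.file.Files in place of commons-io FileUtils; file names and values are invented:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ReportWriter {
    public static void main(String[] args) throws IOException {
        List<String> messages = List.of("parsed 10 entries", "skipped 2 malformed records"); // illustrative
        Map<String, Integer> counts = new LinkedHashMap<>();
        counts.put("modifications", 3);   // illustrative key/value pairs
        counts.put("cloned", 7);

        StringBuilder sb = new StringBuilder();
        for (String line : messages) {
            sb.append(line).append("\n");                 // one message per line
        }
        Files.writeString(Paths.get("factory_output.txt"), sb.toString());

        sb = new StringBuilder();                          // same pattern for the key/value map
        for (Map.Entry<String, Integer> e : counts.entrySet()) {
            sb.append(e.getKey()).append("\t").append(e.getValue()).append("\n");
        }
        Files.writeString(Paths.get("factory_output_map.txt"), sb.toString());
    }
}

Chaining append calls, as above, also avoids the intermediate concatenation the original performs inside sb.append(key + "\t" + value + "\n").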
From source file: au.org.ala.layers.intersect.IntersectConfig.java
public static void main(String[] args) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < 100000; i++) {
        if (i > 0) {
            sb.append(',');
        }
        sb.append(Math.random() * (-12 + 44) - 44).append(',').append(Math.random() * (154 - 112) + 112);
    }
    try {
        FileUtils.writeStringToFile(new File("/data/p.txt"), sb.toString());
    } catch (IOException e) {
        logger.error(e.getMessage(), e);
    }
}
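The detail worth copying from this example is the separator guard: the comma is appended only when i > 0, so the CSV has no leading delimiter. A self-contained variant that prints a few pairs instead of writing to /data/p.txt:

public class CsvPoints {
    public static void main(String[] args) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < 5; i++) {
            if (i > 0) {
                sb.append(',');                                    // separator only between entries
            }
            double latitude = Math.random() * (-12 + 44) - 44;     // roughly -44 .. -12
            double longitude = Math.random() * (154 - 112) + 112;  // roughly 112 .. 154
            sb.append(latitude).append(',').append(longitude);
        }
        System.out.println(sb);
    }
}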
From source file: com.enablens.dfa.base.DcnmAuthToken.java
/**
 * The main method.
 *
 * @param args
 *            the arguments. <Server> <Username> <Password> <Lifetime>
 */
public static void main(final String[] args) {
    StringBuilder sb = new StringBuilder();
    if (args.length == ARGCOUNT) {
        int i = 0;
        String server = args[i++];
        String username = args[i++];
        String password = args[i++];
        Long lifetime = 0L;
        try {
            lifetime = Long.parseLong(args[i++]);
        } catch (NumberFormatException e) {
            System.out.println("The last argument must be a number");
            return;
        }
        DcnmAuthToken dt = new DcnmAuthToken(server, username, password, lifetime);
        final String token = dt.getToken();
        if (dt.getState() == DcnmResponseStates.VALID) {
            sb.append("DCNM Authentication Token = ");
            sb.append(token);
        } else {
            sb.append("Token returned a state of ");
            sb.append(dt.getState());
        }
    } else {
        sb.append("Provide arguments in format of:\n");
        sb.append("Server Username Password Lifetime");
    }
    System.out.println(sb);
}
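The builder here just assembles one of two messages branch by branch and hands the StringBuilder straight to System.out.println, which calls toString() for it. A reduced sketch with placeholder values in place of the DCNM client objects:

public class TokenMessage {
    public static void main(String[] args) {
        boolean valid = args.length > 0;          // stand-in for dt.getState() == VALID
        String token = "placeholder-token";       // stand-in for dt.getToken()

        StringBuilder sb = new StringBuilder();
        if (valid) {
            sb.append("DCNM Authentication Token = ");
            sb.append(token);
        } else {
            sb.append("Provide arguments in format of:\n");
            sb.append("Server Username Password Lifetime");
        }
        System.out.println(sb);                   // println(Object) calls sb.toString()
    }
}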
From source file: com.github.s4ke.moar.cli.Main.java
public static void main(String[] args) throws ParseException, IOException { // create Options object Options options = new Options(); options.addOption("rf", true, "file containing the regexes to test against (multiple regexes are separated by one empty line)"); options.addOption("r", true, "regex to test against"); options.addOption("mf", true, "file/folder to read the MOA from"); options.addOption("mo", true, "folder to export the MOAs to (overwrites if existent)"); options.addOption("sf", true, "file to read the input string(s) from"); options.addOption("s", true, "string to test the MOA/Regex against"); options.addOption("m", false, "multiline matching mode (search in string for regex)"); options.addOption("ls", false, "treat every line of the input string file as one string"); options.addOption("t", false, "trim lines if -ls is set"); options.addOption("d", false, "only do determinism check"); options.addOption("help", false, "prints this dialog"); CommandLineParser parser = new DefaultParser(); CommandLine cmd = parser.parse(options, args); if (args.length == 0 || cmd.hasOption("help")) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("moar-cli", options); return;/*from w w w. j a v a 2 s. co m*/ } List<String> patternNames = new ArrayList<>(); List<MoaPattern> patterns = new ArrayList<>(); List<String> stringsToCheck = new ArrayList<>(); if (cmd.hasOption("r")) { String regexStr = cmd.getOptionValue("r"); try { patterns.add(MoaPattern.compile(regexStr)); patternNames.add(regexStr); } catch (Exception e) { System.out.println(e.getMessage()); } } if (cmd.hasOption("rf")) { String fileName = cmd.getOptionValue("rf"); List<String> regexFileContents = readFileContents(new File(fileName)); int emptyLineCountAfterRegex = 0; StringBuilder regexStr = new StringBuilder(); for (String line : regexFileContents) { if (emptyLineCountAfterRegex >= 1) { if (regexStr.length() > 0) { patterns.add(MoaPattern.compile(regexStr.toString())); patternNames.add(regexStr.toString()); } regexStr.setLength(0); emptyLineCountAfterRegex = 0; } if (line.trim().equals("")) { if (regexStr.length() > 0) { ++emptyLineCountAfterRegex; } } else { regexStr.append(line); } } if (regexStr.length() > 0) { try { patterns.add(MoaPattern.compile(regexStr.toString())); patternNames.add(regexStr.toString()); } catch (Exception e) { System.out.println(e.getMessage()); return; } regexStr.setLength(0); } } if (cmd.hasOption("mf")) { String fileName = cmd.getOptionValue("mf"); File file = new File(fileName); if (file.isDirectory()) { System.out.println(fileName + " is a directory, using all *.moar files as patterns"); File[] moarFiles = file.listFiles(pathname -> pathname.getName().endsWith(".moar")); for (File moar : moarFiles) { String jsonString = readWholeFile(moar); patterns.add(MoarJSONSerializer.fromJSON(jsonString)); patternNames.add(moar.getAbsolutePath()); } } else { System.out.println(fileName + " is a single file. 
using it directly (no check for *.moar suffix)"); String jsonString = readWholeFile(file); patterns.add(MoarJSONSerializer.fromJSON(jsonString)); patternNames.add(fileName); } } if (cmd.hasOption("s")) { String str = cmd.getOptionValue("s"); stringsToCheck.add(str); } if (cmd.hasOption("sf")) { boolean treatLineAsString = cmd.hasOption("ls"); boolean trim = cmd.hasOption("t"); String fileName = cmd.getOptionValue("sf"); StringBuilder stringBuilder = new StringBuilder(); boolean firstLine = true; for (String str : readFileContents(new File(fileName))) { if (treatLineAsString) { if (trim) { str = str.trim(); if (str.length() == 0) { continue; } } stringsToCheck.add(str); } else { if (!firstLine) { stringBuilder.append("\n"); } if (firstLine) { firstLine = false; } stringBuilder.append(str); } } if (!treatLineAsString) { stringsToCheck.add(stringBuilder.toString()); } } if (cmd.hasOption("d")) { //at this point we have already built the Patterns //so just give the user a short note. System.out.println("All Regexes seem to be deterministic."); return; } if (patterns.size() == 0) { System.out.println("no patterns to check"); return; } if (cmd.hasOption("mo")) { String folder = cmd.getOptionValue("mo"); File folderFile = new File(folder); if (!folderFile.exists()) { System.out.println(folder + " does not exist. creating..."); if (!folderFile.mkdirs()) { System.out.println("folder " + folder + " could not be created"); } } int cnt = 0; for (MoaPattern pattern : patterns) { String patternAsJSON = MoarJSONSerializer.toJSON(pattern); try (BufferedWriter writer = new BufferedWriter( new FileWriter(new File(folderFile, "pattern" + ++cnt + ".moar")))) { writer.write(patternAsJSON); } } System.out.println("stored " + cnt + " patterns in " + folder); } if (stringsToCheck.size() == 0) { System.out.println("no strings to check"); return; } boolean multiline = cmd.hasOption("m"); for (String string : stringsToCheck) { int curPattern = 0; for (MoaPattern pattern : patterns) { MoaMatcher matcher = pattern.matcher(string); if (!multiline) { if (matcher.matches()) { System.out.println("\"" + patternNames.get(curPattern) + "\" matches \"" + string + "\""); } else { System.out.println( "\"" + patternNames.get(curPattern) + "\" does not match \"" + string + "\""); } } else { StringBuilder buffer = new StringBuilder(string); int additionalCharsPerMatch = ("<match>" + "</match>").length(); int matchCount = 0; while (matcher.nextMatch()) { buffer.replace(matcher.getStart() + matchCount * additionalCharsPerMatch, matcher.getEnd() + matchCount * additionalCharsPerMatch, "<match>" + string.substring(matcher.getStart(), matcher.getEnd()) + "</match>"); ++matchCount; } System.out.println(buffer.toString()); } } ++curPattern; } }
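The multiline branch above wraps every match in <match>...</match> by calling StringBuilder.replace and shifting the indices by the characters each earlier wrap added. The same bookkeeping, sketched with the JDK's java.util.regex in place of MoaPattern/MoaMatcher (pattern and input are invented):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class MatchWrapper {
    public static void main(String[] args) {
        String string = "one two three two";
        Pattern pattern = Pattern.compile("two");        // stand-in for a compiled MoaPattern

        StringBuilder buffer = new StringBuilder(string);
        int additionalCharsPerMatch = ("<match>" + "</match>").length();
        int matchCount = 0;

        Matcher matcher = pattern.matcher(string);
        while (matcher.find()) {
            // match indices refer to the original string, so shift by the tags already inserted
            buffer.replace(matcher.start() + matchCount * additionalCharsPerMatch,
                    matcher.end() + matchCount * additionalCharsPerMatch,
                    "<match>" + string.substring(matcher.start(), matcher.end()) + "</match>");
            ++matchCount;
        }
        System.out.println(buffer); // one <match>two</match> three <match>two</match>
    }
}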
From source file: fr.ritaly.dungeonmaster.Position.java
public static void main(String[] args) {
    for (int radius = 1; radius <= 15; radius++) {
        final int width, height = width = (2 * radius) + 1;
        final Position center = new Position((width - 1) / 2, (height - 1) / 2, 1);
        final StringBuilder builder = new StringBuilder();
        int insideCount = 0;
        int outsideCount = 0;
        for (int x = 0; x < width; x++) {
            builder.append("+");
            builder.append(StringUtils.repeat("---+", width));
            builder.append("\n");
            builder.append("|");
            for (int y = 0; y < height; y++) {
                final Position position = new Position(x, y, 1);
                if (center.equals(position)) {
                    builder.append(" P |");
                    continue;
                }
                final double distance = Utils.distance(center.x, center.y, position.x, position.y);
                final boolean inside = (distance <= radius + 0.5d);
                if (inside) {
                    builder.append(" ").append(Integer.toHexString((int) Math.floor(distance))).append(" |");
                    insideCount++;
                } else {
                    builder.append(" |");
                    outsideCount++;
                }
                // System.out.println("Position " + position + " (d: "
                //         + distance + ") -> " + inside);
            }
            builder.append("\n");
        }
        builder.append("+");
        builder.append(StringUtils.repeat("---+", width));
        builder.append("\n");
        System.out.println(builder);
        System.out.println();
        System.out.println("Radius: " + radius + " -> Inside: " + insideCount + ", Outside: " + outsideCount);
    }

    // for (int j = 0; j < 10; j++) {
    //     final long start = System.nanoTime();
    //
    //     final Position position = new Position(1, 1, 1);
    //
    //     for (int i = 0; i < 10000; i++) {
    //         position.getSurroundingPositions(6);
    //     }
    //
    //     System.out.println("Elapsed: "
    //             + ((System.nanoTime() - start) / 1000000) + " ms");
    // }
}
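Each grid row above is built by appending a repeated cell border. On Java 11+ the commons-lang StringUtils.repeat call can be replaced by String.repeat, as in this dependency-free sketch of the same row-building idea:

public class AsciiGrid {
    public static void main(String[] args) {
        int width = 5;                                                        // illustrative grid width
        StringBuilder builder = new StringBuilder();
        for (int row = 0; row < width; row++) {
            builder.append("+").append("---+".repeat(width)).append("\n");    // horizontal border
            builder.append("|").append("   |".repeat(width)).append("\n");    // empty cells
        }
        builder.append("+").append("---+".repeat(width)).append("\n");        // closing border
        System.out.println(builder);
    }
}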
From source file: tuit.java
@SuppressWarnings("ConstantConditions") public static void main(String[] args) { System.out.println(licence);//from w w w.ja va2 s.c o m //Declare variables File inputFile; File outputFile; File tmpDir; File blastnExecutable; File properties; File blastOutputFile = null; // TUITPropertiesLoader tuitPropertiesLoader; TUITProperties tuitProperties; // String[] parameters = null; // Connection connection = null; MySQL_Connector mySQL_connector; // Map<Ranks, TUITCutoffSet> cutoffMap; // BLASTIdentifier blastIdentifier = null; // RamDb ramDb = null; CommandLineParser parser = new GnuParser(); Options options = new Options(); options.addOption(tuit.IN, "input<file>", true, "Input file (currently fasta-formatted only)"); options.addOption(tuit.OUT, "output<file>", true, "Output file (in " + tuit.TUIT_EXT + " format)"); options.addOption(tuit.P, "prop<file>", true, "Properties file (XML formatted)"); options.addOption(tuit.V, "verbose", false, "Enable verbose output"); options.addOption(tuit.B, "blast_output<file>", true, "Perform on a pre-BLASTed output"); options.addOption(tuit.DEPLOY, "deploy", false, "Deploy the taxonomic databases"); options.addOption(tuit.UPDATE, "update", false, "Update the taxonomic databases"); options.addOption(tuit.USE_DB, "usedb", false, "Use RDBMS instead of RAM-based taxonomy"); Option option = new Option(tuit.REDUCE, "reduce", true, "Pack identical (100% similar sequences) records in the given sample file"); option.setArgs(Option.UNLIMITED_VALUES); options.addOption(option); option = new Option(tuit.COMBINE, "combine", true, "Combine a set of given reduction files into an HMP Tree-compatible taxonomy"); option.setArgs(Option.UNLIMITED_VALUES); options.addOption(option); options.addOption(tuit.NORMALIZE, "normalize", false, "If used in combination with -combine ensures that the values are normalized by the root value"); HelpFormatter formatter = new HelpFormatter(); try { //Get TUIT directory final File tuitDir = new File( new File(tuit.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath()) .getParent()); final File ramDbFile = new File(tuitDir, tuit.RAM_DB); //Setup logger Log.getInstance().setLogName("tuit.log"); //Read command line final CommandLine commandLine = parser.parse(options, args, true); //Check if the REDUCE option is on if (commandLine.hasOption(tuit.REDUCE)) { final String[] fileList = commandLine.getOptionValues(tuit.REDUCE); for (String s : fileList) { final Path path = Paths.get(s); Log.getInstance().log(Level.INFO, "Processing " + path.toString() + "..."); final NucleotideFastaSequenceReductor nucleotideFastaSequenceReductor = NucleotideFastaSequenceReductor .fromPath(path); ReductorFileOperator.save(nucleotideFastaSequenceReductor, path.resolveSibling(path.getFileName().toString() + ".rdc")); } Log.getInstance().log(Level.FINE, "Task done, exiting..."); return; } //Check if COMBINE is on if (commandLine.hasOption(tuit.COMBINE)) { final boolean normalize = commandLine.hasOption(tuit.NORMALIZE); final String[] fileList = commandLine.getOptionValues(tuit.COMBINE); //TODO: implement a test for format here final List<TreeFormatter.TreeFormatterFormat.HMPTreesOutput> hmpTreesOutputs = new ArrayList<>(); final TreeFormatter treeFormatter = TreeFormatter .newInstance(new TreeFormatter.TuitLineTreeFormatterFormat()); for (String s : fileList) { final Path path = Paths.get(s); Log.getInstance().log(Level.INFO, "Merging " + path.toString() + "..."); treeFormatter.loadFromPath(path); final 
TreeFormatter.TreeFormatterFormat.HMPTreesOutput output = TreeFormatter.TreeFormatterFormat.HMPTreesOutput .newInstance(treeFormatter.toHMPTree(normalize), s.substring(0, s.indexOf("."))); hmpTreesOutputs.add(output); treeFormatter.erase(); } final Path destination; if (commandLine.hasOption(OUT)) { destination = Paths.get(commandLine.getOptionValue(tuit.OUT)); } else { destination = Paths.get("merge.tcf"); } CombinatorFileOperator.save(hmpTreesOutputs, treeFormatter, destination); Log.getInstance().log(Level.FINE, "Task done, exiting..."); return; } if (!commandLine.hasOption(tuit.P)) { throw new ParseException("No properties file option found, exiting."); } else { properties = new File(commandLine.getOptionValue(tuit.P)); } //Load properties tuitPropertiesLoader = TUITPropertiesLoader.newInstanceFromFile(properties); tuitProperties = tuitPropertiesLoader.getTuitProperties(); //Create tmp directory and blastn executable tmpDir = new File(tuitProperties.getTMPDir().getPath()); blastnExecutable = new File(tuitProperties.getBLASTNPath().getPath()); //Check for deploy if (commandLine.hasOption(tuit.DEPLOY)) { if (commandLine.hasOption(tuit.USE_DB)) { NCBITablesDeployer.fastDeployNCBIDatabasesFromNCBI(connection, tmpDir); } else { NCBITablesDeployer.fastDeployNCBIRamDatabaseFromNCBI(tmpDir, ramDbFile); } Log.getInstance().log(Level.FINE, "Task done, exiting..."); return; } //Check for update if (commandLine.hasOption(tuit.UPDATE)) { if (commandLine.hasOption(tuit.USE_DB)) { NCBITablesDeployer.updateDatabasesFromNCBI(connection, tmpDir); } else { //No need to specify a different way to update the database other than just deploy in case of the RAM database NCBITablesDeployer.fastDeployNCBIRamDatabaseFromNCBI(tmpDir, ramDbFile); } Log.getInstance().log(Level.FINE, "Task done, exiting..."); return; } //Connect to the database if (commandLine.hasOption(tuit.USE_DB)) { mySQL_connector = MySQL_Connector.newDefaultInstance( "jdbc:mysql://" + tuitProperties.getDBConnection().getUrl().trim() + "/", tuitProperties.getDBConnection().getLogin().trim(), tuitProperties.getDBConnection().getPassword().trim()); mySQL_connector.connectToDatabase(); connection = mySQL_connector.getConnection(); } else { //Probe for ram database if (ramDbFile.exists() && ramDbFile.canRead()) { Log.getInstance().log(Level.INFO, "Loading RAM taxonomic map..."); try { ramDb = RamDb.loadSelfFromFile(ramDbFile); } catch (IOException ie) { if (ie instanceof java.io.InvalidClassException) throw new IOException("The RAM-based taxonomic database needs to be updated."); } } else { Log.getInstance().log(Level.SEVERE, "The RAM database either has not been deployed, or is not accessible." + "Please use the --deploy option and check permissions on the TUIT directory. 
" + "If you were looking to use the RDBMS as a taxonomic reference, plese use the -usedb option."); return; } } if (commandLine.hasOption(tuit.B)) { blastOutputFile = new File(commandLine.getOptionValue(tuit.B)); if (!blastOutputFile.exists() || !blastOutputFile.canRead()) { throw new Exception("BLAST output file either does not exist, or is not readable."); } else if (blastOutputFile.isDirectory()) { throw new Exception("BLAST output file points to a directory."); } } //Check vital parameters if (!commandLine.hasOption(tuit.IN)) { throw new ParseException("No input file option found, exiting."); } else { inputFile = new File(commandLine.getOptionValue(tuit.IN)); Log.getInstance().setLogName(inputFile.getName().split("\\.")[0] + ".tuit.log"); } //Correct the output file option if needed if (!commandLine.hasOption(tuit.OUT)) { outputFile = new File((inputFile.getPath()).split("\\.")[0] + tuit.TUIT_EXT); } else { outputFile = new File(commandLine.getOptionValue(tuit.OUT)); } //Adjust the output level if (commandLine.hasOption(tuit.V)) { Log.getInstance().setLevel(Level.FINE); Log.getInstance().log(Level.INFO, "Using verbose output for the log"); } else { Log.getInstance().setLevel(Level.INFO); } //Try all files if (inputFile != null) { if (!inputFile.exists() || !inputFile.canRead()) { throw new Exception("Input file either does not exist, or is not readable."); } else if (inputFile.isDirectory()) { throw new Exception("Input file points to a directory."); } } if (!properties.exists() || !properties.canRead()) { throw new Exception("Properties file either does not exist, or is not readable."); } else if (properties.isDirectory()) { throw new Exception("Properties file points to a directory."); } //Create blast parameters final StringBuilder stringBuilder = new StringBuilder(); for (Database database : tuitProperties.getBLASTNParameters().getDatabase()) { stringBuilder.append(database.getUse()); stringBuilder.append(" ");//Gonna insert an extra space for the last database } String remote; String entrez_query; if (tuitProperties.getBLASTNParameters().getRemote().getDelegate().equals("yes")) { remote = "-remote"; entrez_query = "-entrez_query"; parameters = new String[] { "-db", stringBuilder.toString(), remote, entrez_query, tuitProperties.getBLASTNParameters().getEntrezQuery().getValue(), "-evalue", tuitProperties.getBLASTNParameters().getExpect().getValue() }; } else { if (!commandLine.hasOption(tuit.B)) { if (tuitProperties.getBLASTNParameters().getEntrezQuery().getValue().toUpperCase() .startsWith("NOT") || tuitProperties.getBLASTNParameters().getEntrezQuery().getValue().toUpperCase() .startsWith("ALL")) { parameters = new String[] { "-db", stringBuilder.toString(), "-evalue", tuitProperties.getBLASTNParameters().getExpect().getValue(), "-negative_gilist", TUITFileOperatorHelper.restrictToEntrez(tmpDir, tuitProperties.getBLASTNParameters().getEntrezQuery().getValue() .toUpperCase().replace("NOT", "OR")) .getAbsolutePath(), "-num_threads", tuitProperties.getBLASTNParameters().getNumThreads().getValue() }; } else if (tuitProperties.getBLASTNParameters().getEntrezQuery().getValue().toUpperCase() .equals("")) { parameters = new String[] { "-db", stringBuilder.toString(), "-evalue", tuitProperties.getBLASTNParameters().getExpect().getValue(), "-num_threads", tuitProperties.getBLASTNParameters().getNumThreads().getValue() }; } else { parameters = new String[] { "-db", stringBuilder.toString(), "-evalue", tuitProperties.getBLASTNParameters().getExpect().getValue(), /*"-gilist", 
TUITFileOperatorHelper.restrictToEntrez( tmpDir, tuitProperties.getBLASTNParameters().getEntrezQuery().getValue()).getAbsolutePath(),*/ //TODO remove comment!!!!! "-num_threads", tuitProperties.getBLASTNParameters().getNumThreads().getValue() }; } } } //Prepare a cutoff Map if (tuitProperties.getSpecificationParameters() != null && tuitProperties.getSpecificationParameters().size() > 0) { cutoffMap = new HashMap<Ranks, TUITCutoffSet>(tuitProperties.getSpecificationParameters().size()); for (SpecificationParameters specificationParameters : tuitProperties .getSpecificationParameters()) { cutoffMap.put(Ranks.valueOf(specificationParameters.getCutoffSet().getRank()), TUITCutoffSet.newDefaultInstance( Double.parseDouble( specificationParameters.getCutoffSet().getPIdentCutoff().getValue()), Double.parseDouble(specificationParameters.getCutoffSet() .getQueryCoverageCutoff().getValue()), Double.parseDouble( specificationParameters.getCutoffSet().getAlpha().getValue()))); } } else { cutoffMap = new HashMap<Ranks, TUITCutoffSet>(); } final TUITFileOperatorHelper.OutputFormat format; if (tuitProperties.getBLASTNParameters().getOutputFormat().getFormat().equals("rdp")) { format = TUITFileOperatorHelper.OutputFormat.RDP_FIXRANK; } else { format = TUITFileOperatorHelper.OutputFormat.TUIT; } try (TUITFileOperator<NucleotideFasta> nucleotideFastaTUITFileOperator = NucleotideFastaTUITFileOperator .newInstance(format, cutoffMap);) { nucleotideFastaTUITFileOperator.setInputFile(inputFile); nucleotideFastaTUITFileOperator.setOutputFile(outputFile); final String cleanupString = tuitProperties.getBLASTNParameters().getKeepBLASTOuts().getKeep(); final boolean cleanup; if (cleanupString.equals("no")) { Log.getInstance().log(Level.INFO, "Temporary BLAST files will be deleted."); cleanup = true; } else { Log.getInstance().log(Level.INFO, "Temporary BLAST files will be kept."); cleanup = false; } //Create blast identifier ExecutorService executorService = Executors.newSingleThreadExecutor(); if (commandLine.hasOption(tuit.USE_DB)) { if (blastOutputFile == null) { blastIdentifier = TUITBLASTIdentifierDB.newInstanceFromFileOperator(tmpDir, blastnExecutable, parameters, nucleotideFastaTUITFileOperator, connection, cutoffMap, Integer.parseInt( tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()), cleanup); } else { try { blastIdentifier = TUITBLASTIdentifierDB.newInstanceFromBLASTOutput( nucleotideFastaTUITFileOperator, connection, cutoffMap, blastOutputFile, Integer.parseInt( tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()), cleanup); } catch (JAXBException e) { Log.getInstance().log(Level.SEVERE, "Error reading " + blastOutputFile.getName() + ", please check input. The file must be XML formatted."); } catch (Exception e) { e.printStackTrace(); } } } else { if (blastOutputFile == null) { blastIdentifier = TUITBLASTIdentifierRAM.newInstanceFromFileOperator(tmpDir, blastnExecutable, parameters, nucleotideFastaTUITFileOperator, cutoffMap, Integer.parseInt( tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()), cleanup, ramDb); } else { try { blastIdentifier = TUITBLASTIdentifierRAM.newInstanceFromBLASTOutput( nucleotideFastaTUITFileOperator, cutoffMap, blastOutputFile, Integer.parseInt( tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()), cleanup, ramDb); } catch (JAXBException e) { Log.getInstance().log(Level.SEVERE, "Error reading " + blastOutputFile.getName() + ", please check input. 
The file must be XML formatted."); } catch (Exception e) { e.printStackTrace(); } } } Future<?> runnableFuture = executorService.submit(blastIdentifier); runnableFuture.get(); executorService.shutdown(); } } catch (ParseException pe) { Log.getInstance().log(Level.SEVERE, (pe.getMessage())); formatter.printHelp("tuit", options); } catch (SAXException saxe) { Log.getInstance().log(Level.SEVERE, saxe.getMessage()); } catch (FileNotFoundException fnfe) { Log.getInstance().log(Level.SEVERE, fnfe.getMessage()); } catch (TUITPropertyBadFormatException tpbfe) { Log.getInstance().log(Level.SEVERE, tpbfe.getMessage()); } catch (ClassCastException cce) { Log.getInstance().log(Level.SEVERE, cce.getMessage()); } catch (JAXBException jaxbee) { Log.getInstance().log(Level.SEVERE, "The properties file is not well formatted. Please ensure that the XML is consistent with the io.properties.dtd schema."); } catch (ClassNotFoundException cnfe) { //Probably won't happen unless the library deleted from the .jar Log.getInstance().log(Level.SEVERE, cnfe.getMessage()); //cnfe.printStackTrace(); } catch (SQLException sqle) { Log.getInstance().log(Level.SEVERE, "A database communication error occurred with the following message:\n" + sqle.getMessage()); //sqle.printStackTrace(); if (sqle.getMessage().contains("Access denied for user")) { Log.getInstance().log(Level.SEVERE, "Please use standard database login: " + NCBITablesDeployer.login + " and password: " + NCBITablesDeployer.password); } } catch (Exception e) { Log.getInstance().log(Level.SEVERE, e.getMessage()); e.printStackTrace(); } finally { if (connection != null) { try { connection.close(); } catch (SQLException sqle) { Log.getInstance().log(Level.SEVERE, "Problem closing the database connection: " + sqle); } } Log.getInstance().log(Level.FINE, "Task done, exiting..."); } }
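Amid all the option handling above, the StringBuilder itself only assembles the BLAST -db argument: each configured database name is appended followed by a space (the original even notes the deliberate extra space after the last name). A reduced sketch with placeholder database names:

import java.util.List;

public class BlastDbArgument {
    public static void main(String[] args) {
        List<String> databases = List.of("nt", "refseq_rna");    // placeholder database names

        StringBuilder stringBuilder = new StringBuilder();
        for (String database : databases) {
            stringBuilder.append(database);
            stringBuilder.append(" ");                            // extra space after the last name, as in the original
        }
        String[] parameters = { "-db", stringBuilder.toString(), "-evalue", "1e-5" };
        System.out.println(String.join("|", parameters));         // -db|nt refseq_rna |-evalue|1e-5
    }
}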
From source file: hyperheuristics.main.comparisons.ComputeIndicators.java
public static void main(String[] args) throws IOException, InterruptedException { int[] numberOfObjectivesArray = new int[] { 2, 4 }; String[] problems = new String[] { "OO_MyBatis", "OA_AJHsqldb", "OA_AJHotDraw", "OO_BCEL", "OO_JHotDraw", "OA_HealthWatcher", // "OA_TollSystems", "OO_JBoss" }; String[] heuristicFunctions = new String[] { LowLevelHeuristic.CHOICE_FUNCTION, LowLevelHeuristic.MULTI_ARMED_BANDIT, LowLevelHeuristic.RANDOM }; String[] algorithms = new String[] { "NSGA-II", // "SPEA2" };//w w w . j a v a2 s . c o m MetricsUtil metricsUtil = new MetricsUtil(); DecimalFormat decimalFormatter = new DecimalFormat("0.00E0"); Mean mean = new Mean(); StandardDeviation standardDeviation = new StandardDeviation(); InvertedGenerationalDistance igd = new InvertedGenerationalDistance(); GenerationalDistance gd = new GenerationalDistance(); Spread spread = new Spread(); Coverage coverage = new Coverage(); for (int objectives : numberOfObjectivesArray) { try (FileWriter IGDWriter = new FileWriter("experiment/IGD_" + objectives + ".tex"); FileWriter spreadWriter = new FileWriter("experiment/SPREAD_" + objectives + ".tex"); FileWriter GDWriter = new FileWriter("experiment/GD_" + objectives + ".tex"); FileWriter coverageWriter = new FileWriter("experiment/COVERAGE_" + objectives + ".tex")) { StringBuilder latexTableBuilder = new StringBuilder(); latexTableBuilder.append("\\documentclass{paper}\n").append("\n") .append("\\usepackage[T1]{fontenc}\n").append("\\usepackage[latin1]{inputenc}\n") .append("\\usepackage[hidelinks]{hyperref}\n").append("\\usepackage{tabulary}\n") .append("\\usepackage{booktabs}\n").append("\\usepackage{multirow}\n") .append("\\usepackage{amsmath}\n").append("\\usepackage{mathtools}\n") .append("\\usepackage{graphicx}\n").append("\\usepackage{array}\n") .append("\\usepackage[linesnumbered,ruled,inoutnumbered]{algorithm2e}\n") .append("\\usepackage{subfigure}\n").append("\\usepackage[hypcap]{caption}\n") .append("\\usepackage{pdflscape}\n").append("\n").append("\\begin{document}\n").append("\n") .append("\\begin{landscape}\n").append("\n"); pfKnown: { latexTableBuilder.append("\\begin{table}[!htb]\n").append("\t\\centering\n") .append("\t\\def\\arraystretch{1.5}\n") // .append("\t\\setlength{\\tabcolsep}{10pt}\n") // .append("\t\\fontsize{8pt}{10pt}\\selectfont\n") .append("\t\\caption{INDICATOR found for $PF_{known}$ for ").append(objectives) .append(" objectives}\n").append("\t\\label{tab:INDICATOR ").append(objectives) .append(" objectives}\n").append("\t\\begin{tabulary}{\\linewidth}{c"); for (String algorithm : algorithms) { latexTableBuilder.append("c"); for (String heuristicFunction : heuristicFunctions) { latexTableBuilder.append("c"); } } latexTableBuilder.append("}\n").append("\t\t\\toprule\n").append("\t\t\\textbf{System}"); for (String algorithm : algorithms) { latexTableBuilder.append(" & \\textbf{").append(algorithm).append("}"); for (String heuristicFunction : heuristicFunctions) { latexTableBuilder.append(" & \\textbf{").append(algorithm).append("-") .append(heuristicFunction).append("}"); } } latexTableBuilder.append("\\\\\n").append("\t\t\\midrule\n"); for (String problem : problems) { NonDominatedSolutionList trueFront = new NonDominatedSolutionList(); pfTrueComposing: { for (String algorithm : algorithms) { SolutionSet mecbaFront = metricsUtil.readNonDominatedSolutionSet( "resultado/" + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem + "_Comb_" + objectives + "obj/All_FUN_" + algorithm.toLowerCase().replaceAll("-", "") + "-" + problem); 
trueFront.addAll(mecbaFront); for (String hyperHeuristic : heuristicFunctions) { SolutionSet front = metricsUtil.readNonDominatedSolutionSet( "experiment/" + algorithm + "/" + objectives + "objectives/" + hyperHeuristic + "/" + problem + "/FUN.txt"); trueFront.addAll(front); } } } double[][] trueFrontMatrix = trueFront.writeObjectivesToMatrix(); HashMap<String, Double> igdMap = new HashMap<>(); HashMap<String, Double> gdMap = new HashMap<>(); HashMap<String, Double> spreadMap = new HashMap<>(); HashMap<String, Double> coverageMap = new HashMap<>(); for (String algorithm : algorithms) { double[][] mecbaFront = metricsUtil .readFront("resultado/" + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem + "_Comb_" + objectives + "obj/All_FUN_" + algorithm.toLowerCase().replaceAll("-", "") + "-" + problem); igdMap.put(algorithm, igd.invertedGenerationalDistance(mecbaFront, trueFrontMatrix, objectives)); gdMap.put(algorithm, gd.generationalDistance(mecbaFront, trueFrontMatrix, objectives)); spreadMap.put(algorithm, spread.spread(mecbaFront, trueFrontMatrix, objectives)); coverageMap.put(algorithm, coverage.coverage(mecbaFront, trueFrontMatrix)); for (String heuristic : heuristicFunctions) { double[][] heuristicFront = metricsUtil.readFront("experiment/" + algorithm + "/" + objectives + "objectives/" + heuristic + "/" + problem + "/FUN.txt"); igdMap.put(algorithm + "-" + heuristic, igd .invertedGenerationalDistance(heuristicFront, trueFrontMatrix, objectives)); gdMap.put(algorithm + "-" + heuristic, gd.generationalDistance(heuristicFront, trueFrontMatrix, objectives)); spreadMap.put(algorithm + "-" + heuristic, spread.spread(heuristicFront, trueFrontMatrix, objectives)); coverageMap.put(algorithm + "-" + heuristic, coverage.coverage(heuristicFront, trueFrontMatrix)); } } latexTableBuilder.append("\t\t").append(problem); String latexTable = latexTableBuilder.toString(); latexTableBuilder = new StringBuilder(); latexTable = latexTable.replaceAll("O[OA]\\_", "").replaceAll("ChoiceFunction", "CF") .replaceAll("MultiArmedBandit", "MAB"); IGDWriter.write(latexTable.replaceAll("INDICATOR", "IGD")); spreadWriter.write(latexTable.replaceAll("INDICATOR", "Spread")); GDWriter.write(latexTable.replaceAll("INDICATOR", "GD")); coverageWriter.write(latexTable.replaceAll("INDICATOR", "Coverage")); String bestHeuristicIGD = "NULL"; String bestHeuristicGD = "NULL"; String bestHeuristicSpread = "NULL"; String bestHeuristicCoverage = "NULL"; getBest: { double bestMeanIGD = Double.POSITIVE_INFINITY; double bestMeanGD = Double.POSITIVE_INFINITY; double bestMeanSpread = Double.NEGATIVE_INFINITY; double bestMeanCoverage = Double.NEGATIVE_INFINITY; for (String heuristic : igdMap.keySet()) { double heuristicIGD = igdMap.get(heuristic); double heuristicGD = gdMap.get(heuristic); double heuristicSpread = spreadMap.get(heuristic); double heuristicCoverage = coverageMap.get(heuristic); if (heuristicIGD < bestMeanIGD) { bestMeanIGD = heuristicIGD; bestHeuristicIGD = heuristic; } if (heuristicGD < bestMeanGD) { bestMeanGD = heuristicGD; bestHeuristicGD = heuristic; } if (heuristicSpread > bestMeanSpread) { bestMeanSpread = heuristicSpread; bestHeuristicSpread = heuristic; } if (heuristicCoverage > bestMeanCoverage) { bestMeanCoverage = heuristicCoverage; bestHeuristicCoverage = heuristic; } } } StringBuilder igdBuilder = new StringBuilder(); StringBuilder gdBuilder = new StringBuilder(); StringBuilder spreadBuilder = new StringBuilder(); StringBuilder coverageBuilder = new StringBuilder(); String[] newHeuristicFunctions = 
new String[heuristicFunctions.length * algorithms.length + algorithms.length]; fulfillNewHeuristics: { int i = 0; for (String algorithm : algorithms) { newHeuristicFunctions[i++] = algorithm; for (String heuristicFunction : heuristicFunctions) { newHeuristicFunctions[i++] = algorithm + "-" + heuristicFunction; } } } for (String heuristic : newHeuristicFunctions) { igdBuilder.append(" & "); boolean bold = heuristic.equals(bestHeuristicIGD) || igdMap.get(heuristic).equals(igdMap.get(bestHeuristicIGD)); if (bold) { igdBuilder.append("\\textbf{"); } igdBuilder.append(decimalFormatter.format(igdMap.get(heuristic))); if (bold) { igdBuilder.append("}"); } gdBuilder.append(" & "); bold = heuristic.equals(bestHeuristicGD) || gdMap.get(heuristic).equals(gdMap.get(bestHeuristicGD)); if (bold) { gdBuilder.append("\\textbf{"); } gdBuilder.append(decimalFormatter.format(gdMap.get(heuristic))); if (bold) { gdBuilder.append("}"); } spreadBuilder.append(" & "); bold = heuristic.equals(bestHeuristicSpread) || spreadMap.get(heuristic).equals(spreadMap.get(bestHeuristicSpread)); if (bold) { spreadBuilder.append("\\textbf{"); } spreadBuilder.append(decimalFormatter.format(spreadMap.get(heuristic))); if (bold) { spreadBuilder.append("}"); } coverageBuilder.append(" & "); bold = heuristic.equals(bestHeuristicCoverage) || coverageMap.get(heuristic).equals(coverageMap.get(bestHeuristicCoverage)); if (bold) { coverageBuilder.append("\\textbf{"); } coverageBuilder.append(decimalFormatter.format(coverageMap.get(heuristic))); if (bold) { coverageBuilder.append("}"); } } IGDWriter.write(igdBuilder + "\\\\\n"); spreadWriter.write(spreadBuilder + "\\\\\n"); GDWriter.write(gdBuilder + "\\\\\n"); coverageWriter.write(coverageBuilder + "\\\\\n"); } latexTableBuilder = new StringBuilder(); latexTableBuilder.append("\t\t\\bottomrule\n").append("\t\\end{tabulary}\n") .append("\\end{table}\n\n"); } averages: { latexTableBuilder.append("\\begin{table}[!htb]\n").append("\t\\centering\n") .append("\t\\def\\arraystretch{1.5}\n") // .append("\t\\setlength{\\tabcolsep}{10pt}\n") // .append("\t\\fontsize{8pt}{10pt}\\selectfont\n") .append("\t\\caption{INDICATOR averages found for ").append(objectives) .append(" objectives}\n").append("\t\\label{tab:INDICATOR ").append(objectives) .append(" objectives}\n").append("\t\\begin{tabulary}{\\linewidth}{c"); for (String algorithm : algorithms) { latexTableBuilder.append("c"); for (String heuristicFunction : heuristicFunctions) { latexTableBuilder.append("c"); } } latexTableBuilder.append("}\n").append("\t\t\\toprule\n").append("\t\t\\textbf{System}"); for (String algorithm : algorithms) { latexTableBuilder.append(" & \\textbf{").append(algorithm).append("}"); for (String heuristicFunction : heuristicFunctions) { latexTableBuilder.append(" & \\textbf{").append(algorithm).append("-") .append(heuristicFunction).append("}"); } } latexTableBuilder.append("\\\\\n").append("\t\t\\midrule\n"); for (String problem : problems) { NonDominatedSolutionList trueFront = new NonDominatedSolutionList(); pfTrueComposing: { for (String algorithm : algorithms) { SolutionSet mecbaFront = metricsUtil.readNonDominatedSolutionSet( "resultado/" + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem + "_Comb_" + objectives + "obj/All_FUN_" + algorithm.toLowerCase().replaceAll("-", "") + "-" + problem); trueFront.addAll(mecbaFront); for (String hyperHeuristic : heuristicFunctions) { SolutionSet front = metricsUtil.readNonDominatedSolutionSet( "experiment/" + algorithm + "/" + objectives + "objectives/" + 
hyperHeuristic + "/" + problem + "/FUN.txt"); trueFront.addAll(front); } } } double[][] trueFrontMatrix = trueFront.writeObjectivesToMatrix(); HashMap<String, double[]> igdMap = new HashMap<>(); HashMap<String, double[]> gdMap = new HashMap<>(); HashMap<String, double[]> spreadMap = new HashMap<>(); HashMap<String, double[]> coverageMap = new HashMap<>(); mocaito: { for (String algorithm : algorithms) { double[] mecbaIGDs = new double[EXECUTIONS]; double[] mecbaGDs = new double[EXECUTIONS]; double[] mecbaSpreads = new double[EXECUTIONS]; double[] mecbaCoverages = new double[EXECUTIONS]; for (int i = 0; i < EXECUTIONS; i++) { double[][] mecbaFront = metricsUtil.readFront("resultado/" + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem + "_Comb_" + objectives + "obj/FUN_" + algorithm.toLowerCase().replaceAll("-", "") + "-" + problem + "-" + i + ".NaoDominadas"); mecbaIGDs[i] = igd.invertedGenerationalDistance(mecbaFront, trueFrontMatrix, objectives); mecbaGDs[i] = gd.generationalDistance(mecbaFront, trueFrontMatrix, objectives); mecbaSpreads[i] = spread.spread(mecbaFront, trueFrontMatrix, objectives); mecbaCoverages[i] = coverage.coverage(mecbaFront, trueFrontMatrix); } igdMap.put(algorithm, mecbaIGDs); gdMap.put(algorithm, mecbaGDs); spreadMap.put(algorithm, mecbaSpreads); coverageMap.put(algorithm, mecbaCoverages); } } for (String algorithm : algorithms) { for (String heuristic : heuristicFunctions) { double[] hhIGDs = new double[EXECUTIONS]; double[] hhGDs = new double[EXECUTIONS]; double[] hhSpreads = new double[EXECUTIONS]; double[] hhCoverages = new double[EXECUTIONS]; for (int i = 0; i < EXECUTIONS; i++) { double[][] hhFront = metricsUtil .readFront("experiment/" + algorithm + "/" + objectives + "objectives/" + heuristic + "/" + problem + "/EXECUTION_" + i + "/FUN.txt"); hhIGDs[i] = igd.invertedGenerationalDistance(hhFront, trueFrontMatrix, objectives); hhGDs[i] = gd.generationalDistance(hhFront, trueFrontMatrix, objectives); hhSpreads[i] = spread.spread(hhFront, trueFrontMatrix, objectives); hhCoverages[i] = coverage.coverage(hhFront, trueFrontMatrix); } igdMap.put(algorithm + "-" + heuristic, hhIGDs); gdMap.put(algorithm + "-" + heuristic, hhGDs); spreadMap.put(algorithm + "-" + heuristic, hhSpreads); coverageMap.put(algorithm + "-" + heuristic, hhCoverages); } } HashMap<String, HashMap<String, Boolean>> igdResult = KruskalWallisTest.test(igdMap); HashMap<String, HashMap<String, Boolean>> gdResult = KruskalWallisTest.test(gdMap); HashMap<String, HashMap<String, Boolean>> spreadResult = KruskalWallisTest.test(spreadMap); HashMap<String, HashMap<String, Boolean>> coverageResult = KruskalWallisTest .test(coverageMap); latexTableBuilder.append("\t\t").append(problem); String latexTable = latexTableBuilder.toString(); latexTable = latexTable.replaceAll("O[OA]\\_", "").replaceAll("ChoiceFunction", "CF") .replaceAll("MultiArmedBandit", "MAB"); IGDWriter.write(latexTable.replaceAll("INDICATOR", "IGD")); spreadWriter.write(latexTable.replaceAll("INDICATOR", "Spread")); GDWriter.write(latexTable.replaceAll("INDICATOR", "GD")); coverageWriter.write(latexTable.replaceAll("INDICATOR", "Coverage")); latexTableBuilder = new StringBuilder(); String bestHeuristicIGD = "NULL"; String bestHeuristicGD = "NULL"; String bestHeuristicSpread = "NULL"; String bestHeuristicCoverage = "NULL"; getBest: { double bestMeanIGD = Double.POSITIVE_INFINITY; double bestMeanGD = Double.POSITIVE_INFINITY; double bestMeanSpread = Double.NEGATIVE_INFINITY; double bestMeanCoverage = Double.NEGATIVE_INFINITY; for 
(String heuristic : igdMap.keySet()) { double heuristicMeanIGD = mean.evaluate(igdMap.get(heuristic)); double heuristicMeanGD = mean.evaluate(gdMap.get(heuristic)); double heuristicMeanSpread = mean.evaluate(spreadMap.get(heuristic)); double heuristicMeanCoverage = mean.evaluate(coverageMap.get(heuristic)); if (heuristicMeanIGD < bestMeanIGD) { bestMeanIGD = heuristicMeanIGD; bestHeuristicIGD = heuristic; } if (heuristicMeanGD < bestMeanGD) { bestMeanGD = heuristicMeanGD; bestHeuristicGD = heuristic; } if (heuristicMeanSpread > bestMeanSpread) { bestMeanSpread = heuristicMeanSpread; bestHeuristicSpread = heuristic; } if (heuristicMeanCoverage > bestMeanCoverage) { bestMeanCoverage = heuristicMeanCoverage; bestHeuristicCoverage = heuristic; } } } StringBuilder igdBuilder = new StringBuilder(); StringBuilder gdBuilder = new StringBuilder(); StringBuilder spreadBuilder = new StringBuilder(); StringBuilder coverageBuilder = new StringBuilder(); String[] newHeuristicFunctions = new String[heuristicFunctions.length * algorithms.length + algorithms.length]; fulfillNewHeuristics: { int i = 0; for (String algorithm : algorithms) { newHeuristicFunctions[i++] = algorithm; for (String heuristicFunction : heuristicFunctions) { newHeuristicFunctions[i++] = algorithm + "-" + heuristicFunction; } } } for (String heuristic : newHeuristicFunctions) { igdBuilder.append(" & "); boolean bold = heuristic.equals(bestHeuristicIGD) || !igdResult.get(heuristic).get(bestHeuristicIGD); if (bold) { igdBuilder.append("\\textbf{"); } igdBuilder.append(decimalFormatter.format(mean.evaluate(igdMap.get(heuristic))) + " (" + decimalFormatter.format(standardDeviation.evaluate(igdMap.get(heuristic))) + ")"); if (bold) { igdBuilder.append("}"); } gdBuilder.append(" & "); bold = heuristic.equals(bestHeuristicGD) || !gdResult.get(heuristic).get(bestHeuristicGD); if (bold) { gdBuilder.append("\\textbf{"); } gdBuilder.append(decimalFormatter.format(mean.evaluate(gdMap.get(heuristic))) + " (" + decimalFormatter.format(standardDeviation.evaluate(gdMap.get(heuristic))) + ")"); if (bold) { gdBuilder.append("}"); } spreadBuilder.append(" & "); bold = heuristic.equals(bestHeuristicSpread) || !spreadResult.get(heuristic).get(bestHeuristicSpread); if (bold) { spreadBuilder.append("\\textbf{"); } spreadBuilder.append(decimalFormatter.format(mean.evaluate(spreadMap.get(heuristic))) + " (" + decimalFormatter.format(standardDeviation.evaluate(spreadMap.get(heuristic))) + ")"); if (bold) { spreadBuilder.append("}"); } coverageBuilder.append(" & "); bold = heuristic.equals(bestHeuristicCoverage) || !coverageResult.get(heuristic).get(bestHeuristicCoverage); if (bold) { coverageBuilder.append("\\textbf{"); } coverageBuilder .append(decimalFormatter.format(mean.evaluate(coverageMap.get(heuristic)))) .append(" (") .append(decimalFormatter .format(standardDeviation.evaluate(coverageMap.get(heuristic)))) .append(")"); if (bold) { coverageBuilder.append("}"); } } IGDWriter.write(igdBuilder + "\\\\\n"); spreadWriter.write(spreadBuilder + "\\\\\n"); GDWriter.write(gdBuilder + "\\\\\n"); coverageWriter.write(coverageBuilder + "\\\\\n"); } latexTableBuilder.append("\t\t\\bottomrule\n").append("\t\\end{tabulary}\n") .append("\\end{table}\n\n"); } latexTableBuilder.append("\\end{landscape}\n\n").append("\\end{document}"); String latexTable = latexTableBuilder.toString(); IGDWriter.write(latexTable); spreadWriter.write(latexTable); GDWriter.write(latexTable); coverageWriter.write(latexTable); } } }
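The example above fills each LaTeX table cell by chaining append calls on a per-indicator StringBuilder (for instance coverageBuilder), wrapping the best result in \textbf{...}. Below is a minimal, self-contained sketch of that pattern only; the class name LatexCellDemo, the DecimalFormat pattern, and the sample mean/standard-deviation values are illustrative assumptions, not part of the original source.

import java.text.DecimalFormat;

// Sketch (assumed names and values): build a "mean (stddev)" LaTeX cell,
// optionally bolded, by chaining StringBuilder.append calls.
public class LatexCellDemo {
    public static void main(String[] args) {
        DecimalFormat decimalFormatter = new DecimalFormat("0.00"); // assumed format pattern
        double meanValue = 0.4215;   // illustrative value
        double stdDevValue = 0.0312; // illustrative value
        boolean bold = true;         // in the example this comes from the Kruskal-Wallis result

        StringBuilder cell = new StringBuilder();
        cell.append(" & ");
        if (bold) {
            cell.append("\\textbf{");
        }
        cell.append(decimalFormatter.format(meanValue))
                .append(" (")
                .append(decimalFormatter.format(stdDevValue))
                .append(")");
        if (bold) {
            cell.append("}");
        }
        // e.g. " & \textbf{0.42 (0.03)}" (decimal separator is locale-dependent)
        System.out.println(cell);
    }
}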
From source file:examples.mail.IMAPImportMbox.java
public static void main(String[] args) throws IOException {
    if (args.length < 2) {
        System.err.println(
                "Usage: IMAPImportMbox imap[s]://user:password@host[:port]/folder/path <mboxfile> [selectors]");
        System.err.println("\tWhere: a selector is a list of numbers/number ranges - 1,2,3-10"
                + " - or a list of strings to match in the initial From line");
        System.exit(1);
    }

    final URI uri = URI.create(args[0]);
    final String file = args[1];

    final File mbox = new File(file);
    if (!mbox.isFile() || !mbox.canRead()) {
        throw new IOException("Cannot read mailbox file: " + mbox);
    }

    String path = uri.getPath();
    if (path == null || path.length() < 1) {
        throw new IllegalArgumentException("Invalid folderPath: '" + path + "'");
    }
    String folder = path.substring(1); // skip the leading /

    List<String> contains = new ArrayList<String>(); // list of strings to find
    BitSet msgNums = new BitSet(); // list of message numbers

    for (int i = 2; i < args.length; i++) {
        String arg = args[i];
        if (arg.matches("\\d+(-\\d+)?(,\\d+(-\\d+)?)*")) { // number,m-n
            for (String entry : arg.split(",")) {
                String[] parts = entry.split("-");
                if (parts.length == 2) { // m-n
                    int low = Integer.parseInt(parts[0]);
                    int high = Integer.parseInt(parts[1]);
                    for (int j = low; j <= high; j++) {
                        msgNums.set(j);
                    }
                } else {
                    msgNums.set(Integer.parseInt(entry));
                }
            }
        } else {
            contains.add(arg); // not a number/number range
        }
    }
    // System.out.println(msgNums.toString());
    // System.out.println(java.util.Arrays.toString(contains.toArray()));

    // Connect and login
    final IMAPClient imap = IMAPUtils.imapLogin(uri, 10000, null);

    int total = 0;
    int loaded = 0;
    try {
        imap.setSoTimeout(6000);

        final BufferedReader br = new BufferedReader(new FileReader(file)); // TODO charset?

        String line;
        StringBuilder sb = new StringBuilder();
        boolean wanted = false;
        // Skip any leading rubbish
        while ((line = br.readLine()) != null) {
            if (line.startsWith("From ")) { // start of message; i.e. end of previous (if any)
                if (process(sb, imap, folder, total)) { // process previous message (if any)
                    loaded++;
                }
                sb.setLength(0);
                total++;
                wanted = wanted(total, line, msgNums, contains);
            } else if (startsWith(line, PATFROM)) { // Unescape ">+From " in body text
                line = line.substring(1);
            }
            // TODO process first Received: line to determine arrival date?
            if (wanted) {
                sb.append(line);
                sb.append(CRLF);
            }
        }
        br.close();
        if (wanted && process(sb, imap, folder, total)) { // last message (if any)
            loaded++;
        }
    } catch (IOException e) {
        System.out.println(imap.getReplyString());
        e.printStackTrace();
        System.exit(10);
        return;
    } finally {
        imap.logout();
        imap.disconnect();
    }
    System.out.println("Processed " + total + " messages, loaded " + loaded);
}
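The mbox loop above accumulates each message with sb.append(line) followed by sb.append(CRLF) and reuses the same buffer by calling sb.setLength(0) whenever a "From " separator line starts the next message. The following is a rough, self-contained sketch of just that buffering pattern, without the IMAP upload; the class name MboxSplitDemo, the handle helper, and the in-memory sample data are assumptions for illustration and do not come from the original example.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

// Sketch (assumed names and data): split an mbox-style stream on "From " lines,
// reusing one StringBuilder and resetting it with setLength(0) between messages.
public class MboxSplitDemo {
    private static final String CRLF = "\r\n";

    public static void main(String[] args) throws IOException {
        String sample = "From alice Mon Jan 01 00:00:00 2024\n"
                + "Subject: first\n\nbody one\n"
                + "From bob Tue Jan 02 00:00:00 2024\n"
                + "Subject: second\n\nbody two\n";

        BufferedReader br = new BufferedReader(new StringReader(sample));
        StringBuilder sb = new StringBuilder();
        int total = 0;
        String line;
        while ((line = br.readLine()) != null) {
            if (line.startsWith("From ")) { // start of a new message
                if (sb.length() > 0) {
                    handle(sb, total);      // previous message is complete
                }
                sb.setLength(0);            // reuse the same buffer
                total++;
            }
            sb.append(line).append(CRLF);   // accumulate the current message
        }
        if (sb.length() > 0) {
            handle(sb, total);              // last message
        }
        System.out.println("Messages seen: " + total);
    }

    private static void handle(StringBuilder message, int number) {
        System.out.println("--- message " + number + " (" + message.length() + " chars) ---");
    }
}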