List of usage examples for java.io.FileWriter.append
@Override public Writer append(CharSequence csq) throws IOException
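Before the project examples below, here is a minimal, self-contained sketch of the call; the file name and contents are made up for illustration and are not taken from any of the sources listed here. append(CharSequence) writes at the writer's current position and returns the writer itself, so calls can be chained.

    import java.io.FileWriter;
    import java.io.IOException;

    public class FileWriterAppendExample {
        public static void main(String[] args) throws IOException {
            // try-with-resources flushes and closes the writer automatically
            try (FileWriter writer = new FileWriter("notes.txt", true)) { // true = open in append mode
                writer.append("first line").append('\n');   // append returns the writer, so calls chain
                writer.append("second line").append('\n');
            }
        }
    }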
From source file:enrichment.Disambiguate.java
/**
 * prerequisites:
 *   cd silk_2.5.3/*_links/
 *   cat *.nt|sort -t' ' -k3 > $filename
 *
 * @param args $filename
 * @throws IOException
 * @throws URISyntaxException
 */
public static void main(String[] args) {
    File file = new File(args[0]);
    if (file.isDirectory()) {
        args = file.list(new OnlyExtFilenameFilter("nt"));
    }
    BufferedReader in;
    for (int q = 0; q < args.length; q++) {
        String filename = null;
        if (file.isDirectory()) {
            filename = file.getPath() + File.separator + args[q];
        } else {
            filename = args[q];
        }
        try {
            FileWriter output = new FileWriter(filename + "_disambiguated.nt");
            String prefix = "@prefix rdrel: <http://rdvocab.info/RDARelationshipsWEMI/> .\n"
                    + "@prefix dbpedia: <http://de.dbpedia.org/resource/> .\n"
                    + "@prefix frbr: <http://purl.org/vocab/frbr/core#> .\n"
                    + "@prefix lobid: <http://lobid.org/resource/> .\n"
                    + "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n"
                    + "@prefix foaf: <http://xmlns.com/foaf/0.1/> .\n"
                    + "@prefix mo: <http://purl.org/ontology/mo/> .\n"
                    + "@prefix wikipedia: <https://de.wikipedia.org/wiki/> .";
            output.append(prefix + "\n\n");
            in = new BufferedReader(new InputStreamReader(new FileInputStream(filename)));
            HashMap<String, HashMap<String, ArrayList<String>>> hm = new HashMap<String, HashMap<String, ArrayList<String>>>();
            String s;
            HashMap<String, ArrayList<String>> hmLobid = new HashMap<String, ArrayList<String>>();
            Stack<String> old_object = new Stack<String>();
            while ((s = in.readLine()) != null) {
                String[] triples = s.split(" ");
                String object = triples[2].substring(1, triples[2].length() - 1);
                if (old_object.size() > 0 && !old_object.firstElement().equals(object)) {
                    hmLobid = new HashMap<String, ArrayList<String>>();
                    old_object = new Stack<String>();
                }
                old_object.push(object);
                String subject = triples[0].substring(1, triples[0].length() - 1);
                System.out.print("\nSubject=" + object);
                System.out.print("\ntriples[2]=" + triples[2]);
                hmLobid.put(subject, getAllCreators(new URI(subject)));
                hm.put(object, hmLobid);
            }
            // get all dbpedia resources
            for (String key_one : hm.keySet()) {
                System.out.print("\n==============\n==== " + key_one + "\n===============");
                int resources_cnt = hm.get(key_one).keySet().size();
                ArrayList<String>[] creators = new ArrayList[resources_cnt];
                HashMap<String, Integer> creators_backed = new HashMap<String, Integer>();
                int x = 0;
                // get all lobid_resources subsumed under the dbpedia resource
                for (String subject_uri : hm.get(key_one).keySet()) {
                    creators[x] = new ArrayList<String>();
                    System.out.print("\n subject_uri=" + subject_uri);
                    Iterator<String> ite = hm.get(key_one).get(subject_uri).iterator();
                    int y = 0;
                    // get all creators of the lobid resource
                    while (ite.hasNext()) {
                        String creator = ite.next();
                        System.out.print("\n " + creator);
                        if (creators_backed.containsKey(creator)) {
                            y = creators_backed.get(creator);
                        } else {
                            y = creators_backed.size();
                            creators_backed.put(creator, y);
                        }
                        while (creators[x].size() <= y) {
                            creators[x].add("-");
                        }
                        creators[x].set(y, creator);
                        y++;
                    }
                    x++;
                }
                if (creators_backed.size() == 1) {
                    System.out.println("\n" + "Every resource pointing to " + key_one + " has the same creator!");
                    for (String key_two : hm.get(key_one).keySet()) {
                        output.append("<" + key_two + "> rdrel:workManifested <" + key_one + "> .\n");
                        output.append("<" + key_two + "> mo:wikipedia <"
                                + key_one.replaceAll("dbpedia\\.org/resource", "wikipedia\\.org/wiki") + "> .\n");
                    }
                }
                /*else {
                    for (int a = 0; a < creators.length; a++) {
                        System.out.print(creators[a].toString()+",");
                    }
                }*/
            }
            output.flush();
            if (output != null) {
                output.close();
            }
        } catch (Exception e) {
            System.out.print("Exception while working on " + filename + ": \n");
            e.printStackTrace(System.out);
        }
    }
}
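The example above closes the FileWriter only on the success path inside the try block, so an exception thrown while reading or appending leaves the writer and the reader open. A sketch of the same prefix-then-append pattern using try-with-resources, reusing the filename and prefix variables from above with the per-line processing elided:

    try (FileWriter output = new FileWriter(filename + "_disambiguated.nt");
         BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(filename)))) {
        output.append(prefix).append("\n\n");
        String s;
        while ((s = in.readLine()) != null) {
            // ... parse the triple and append the rewritten statements ...
        }
    } // both resources are closed here even if an exception is thrown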
From source file:de.tudarmstadt.ukp.experiments.dip.wp1.documents.Step8GoldDataAggregator.java
public static void main(String[] args) throws Exception {
    String inputDir = args[0] + "/";
    // output dir
    File outputDir = new File(args[1]);
    File turkersConfidence = new File(args[2]);
    if (outputDir.exists()) {
        outputDir.delete();
    }
    outputDir.mkdir();

    List<String> annotatorsIDs = new ArrayList<>();
    //        for (File f : FileUtils.listFiles(new File(inputDir), new String[] { "xml" }, false)) {
    //            QueryResultContainer queryResultContainer = QueryResultContainer
    //                    .fromXML(FileUtils.readFileToString(f, "utf-8"));
    //            for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) {
    //                for (QueryResultContainer.MTurkRelevanceVote relevanceVote : rankedResults.mTurkRelevanceVotes) {
    //                    if (!annotatorsIDs.contains(relevanceVote.turkID))
    //                        annotatorsIDs.add(relevanceVote.turkID);
    //                }
    //            }
    //        }
    HashMap<String, Integer> countVotesForATurker = new HashMap<>();

    // creates temporary file with format for mace
    // Hashmap annotations: key is the id of a document and a sentence
    // Value is an array votes[] of turkers decisions: true or false (relevant or not)
    // the length of this array equals the number of annotators in List<String> annotatorsIDs.
    // If an annotator worked on the task his decision is written in the array otherwise the value is NULL
    // key: queryID + clueWebID + sentenceID
    // value: true and false annotations
    TreeMap<String, Annotations> annotations = new TreeMap<>();
    for (File f : FileUtils.listFiles(new File(inputDir), new String[] { "xml" }, false)) {
        QueryResultContainer queryResultContainer = QueryResultContainer
                .fromXML(FileUtils.readFileToString(f, "utf-8"));
        System.out.println("Reading " + f.getName());
        for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) {
            String documentID = rankedResults.clueWebID;
            for (QueryResultContainer.MTurkRelevanceVote relevanceVote : rankedResults.mTurkRelevanceVotes) {
                Integer turkerID;
                if (!annotatorsIDs.contains(relevanceVote.turkID)) {
                    annotatorsIDs.add(relevanceVote.turkID);
                    turkerID = annotatorsIDs.size() - 1;
                } else {
                    turkerID = annotatorsIDs.indexOf(relevanceVote.turkID);
                }
                Integer count = countVotesForATurker.get(relevanceVote.turkID);
                if (count == null) {
                    count = 0;
                }
                count++;
                countVotesForATurker.put(relevanceVote.turkID, count);

                String id;
                List<Integer> trueVotes;
                List<Integer> falseVotes;
                for (QueryResultContainer.SingleSentenceRelevanceVote singleSentenceRelevanceVote : relevanceVote.singleSentenceRelevanceVotes)
                    if (!"".equals(singleSentenceRelevanceVote.sentenceID)) {
                        id = f.getName() + "_" + documentID + "_" + singleSentenceRelevanceVote.sentenceID;
                        Annotations turkerVotes = annotations.get(id);
                        if (turkerVotes == null) {
                            trueVotes = new ArrayList<>();
                            falseVotes = new ArrayList<>();
                            turkerVotes = new Annotations(trueVotes, falseVotes);
                        }
                        trueVotes = turkerVotes.trueAnnotations;
                        falseVotes = turkerVotes.falseAnnotations;
                        if ("true".equals(singleSentenceRelevanceVote.relevant)) {
                            // votes[turkerID] = true;
                            trueVotes.add(turkerID);
                        } else if ("false".equals(singleSentenceRelevanceVote.relevant)) {
                            // votes[turkerID] = false;
                            falseVotes.add(turkerID);
                        } else {
                            throw new IllegalStateException("Annotation value of sentence "
                                    + singleSentenceRelevanceVote.sentenceID + " in " + rankedResults.clueWebID
                                    + " equals " + singleSentenceRelevanceVote.relevant);
                        }
                        try {
                            int allVotesCount = trueVotes.size() + falseVotes.size();
                            if (allVotesCount > 5) {
                                System.err.println(id + " doesn't have 5 annotators: true: " + trueVotes.size()
                                        + " false: " + falseVotes.size());
                                // nasty hack, we're gonna strip some data; true votes first
                                /* we can't do that, it breaks something down the line
                                int toRemove = allVotesCount - 5;
                                if (trueVotes.size() >= toRemove) {
                                    trueVotes = trueVotes.subList(0, trueVotes.size() - toRemove);
                                } else if (falseVotes.size() >= toRemove) {
                                    falseVotes = falseVotes.subList(0, trueVotes.size() - toRemove);
                                }
                                */
                                System.err.println("Adjusted: " + id + " doesn't have 5 annotators: true: "
                                        + trueVotes.size() + " false: " + falseVotes.size());
                            }
                        } catch (IllegalStateException e) {
                            e.printStackTrace();
                        }
                        turkerVotes.trueAnnotations = trueVotes;
                        turkerVotes.falseAnnotations = falseVotes;
                        annotations.put(id, turkerVotes);
                    } else {
                        throw new IllegalStateException(
                                "Empty Sentence ID in " + f.getName() + " for turker " + turkerID);
                    }
            }
        }
    }
    File tmp = printHashMap(annotations, annotatorsIDs.size());

    String file = TEMP_DIR + "/" + tmp.getName();
    MACE.main(new String[] { "--prefix", file });

    // gets the keys of the documents and sentences
    ArrayList<String> lines = (ArrayList<String>) FileUtils.readLines(new File(file + ".prediction"));
    int i = 0;
    TreeMap<String, TreeMap<String, ArrayList<HashMap<String, String>>>> ids = new TreeMap<>();
    ArrayList<HashMap<String, String>> sentences;
    if (lines.size() != annotations.size()) {
        throw new IllegalStateException(
                "The size of prediction file is " + lines.size() + "but expected " + annotations.size());
    }
    for (Map.Entry entry : annotations.entrySet()) {
        // 1001.xml_clueweb12-1905wb-13-07360_8783
        String key = (String) entry.getKey();
        String[] IDs = key.split("_");
        if (IDs.length > 2) {
            String queryID = IDs[0];
            String clueWebID = IDs[1];
            String sentenceID = IDs[2];
            TreeMap<String, ArrayList<HashMap<String, String>>> clueWebIDs = ids.get(queryID);
            if (clueWebIDs == null) {
                clueWebIDs = new TreeMap<>();
            }
            sentences = clueWebIDs.get(clueWebID);
            if (sentences == null) {
                sentences = new ArrayList<>();
            }
            HashMap<String, String> sentence = new HashMap<>();
            sentence.put(sentenceID, lines.get(i));
            sentences.add(sentence);
            clueWebIDs.put(clueWebID, sentences);
            ids.put(queryID, clueWebIDs);
        } else {
            throw new IllegalStateException("Wrong ID " + key);
        }
        i++;
    }
    for (Map.Entry entry : ids.entrySet()) {
        TreeMap<Integer, String> value = (TreeMap<Integer, String>) entry.getValue();
        String queryID = (String) entry.getKey();
        QueryResultContainer queryResultContainer = QueryResultContainer
                .fromXML(FileUtils.readFileToString(new File(inputDir, queryID), "utf-8"));
        for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) {
            for (Map.Entry val : value.entrySet()) {
                String clueWebID = (String) val.getKey();
                if (clueWebID.equals(rankedResults.clueWebID)) {
                    List<QueryResultContainer.SingleSentenceRelevanceVote> goldEstimatedLabels = new ArrayList<>();
                    List<QueryResultContainer.SingleSentenceRelevanceVote> turkersVotes = new ArrayList<>();
                    int size = 0;
                    int hitSize = 0;
                    String hitID = "";
                    for (QueryResultContainer.MTurkRelevanceVote vote : rankedResults.mTurkRelevanceVotes) {
                        if (!hitID.equals(vote.hitID)) {
                            hitID = vote.hitID;
                            hitSize = vote.singleSentenceRelevanceVotes.size();
                            size = size + hitSize;
                            turkersVotes.addAll(vote.singleSentenceRelevanceVotes);
                        } else {
                            if (vote.singleSentenceRelevanceVotes.size() != hitSize) {
                                hitSize = vote.singleSentenceRelevanceVotes.size();
                                size = size + hitSize;
                                turkersVotes.addAll(vote.singleSentenceRelevanceVotes);
                            }
                        }
                    }
                    ArrayList<HashMap<String, String>> sentenceList = (ArrayList<HashMap<String, String>>) val
                            .getValue();
                    if (sentenceList.size() != turkersVotes.size()) {
                        try {
                            throw new IllegalStateException("Expected size of annotations is "
                                    + turkersVotes.size() + "but found " + sentenceList.size()
                                    + " for document " + rankedResults.clueWebID + " in " + queryID);
                        } catch (IllegalStateException ex) {
                            ex.printStackTrace();
                        }
                    }
                    for (QueryResultContainer.SingleSentenceRelevanceVote s : turkersVotes) {
                        String valSentence = null;
                        for (HashMap<String, String> anno : sentenceList) {
                            if (anno.keySet().contains(s.sentenceID)) {
                                valSentence = anno.get(s.sentenceID);
                            }
                        }
                        QueryResultContainer.SingleSentenceRelevanceVote singleSentenceVote = new QueryResultContainer.SingleSentenceRelevanceVote();
                        singleSentenceVote.sentenceID = s.sentenceID;
                        if (("false").equals(valSentence)) {
                            singleSentenceVote.relevant = "false";
                        } else if (("true").equals(valSentence)) {
                            singleSentenceVote.relevant = "true";
                        } else {
                            throw new IllegalStateException("Annotation value of sentence "
                                    + singleSentenceVote.sentenceID + " equals " + val.getValue());
                        }
                        goldEstimatedLabels.add(singleSentenceVote);
                    }
                    rankedResults.goldEstimatedLabels = goldEstimatedLabels;
                }
            }
        }
        File outputFile = new File(outputDir, queryID);
        FileUtils.writeStringToFile(outputFile, queryResultContainer.toXML(), "utf-8");
        System.out.println("Finished " + outputFile);
    }

    ArrayList<String> annotators = (ArrayList<String>) FileUtils.readLines(new File(file + ".competence"));
    FileWriter fileWriter;
    StringBuilder sb = new StringBuilder();
    for (int j = 0; j < annotatorsIDs.size(); j++) {
        String[] s = annotators.get(0).split("\t");
        Float score = Float.parseFloat(s[j]);
        String turkerID = annotatorsIDs.get(j);
        System.out.println(turkerID + " " + score + " " + countVotesForATurker.get(turkerID));
        sb.append(turkerID).append(" ").append(score).append(" ").append(countVotesForATurker.get(turkerID))
                .append("\n");
    }
    fileWriter = new FileWriter(turkersConfidence);
    fileWriter.append(sb.toString());
    fileWriter.close();
}
From source file:com.iciql.test.IciqlSuite.java
/**
 * Main entry point for the test suite. Executing this method will run the
 * test suite on all registered databases.
 *
 * @param args
 * @throws Exception
 */
public static void main(String... args) throws Exception {
    Params params = new Params();
    JCommander jc = new JCommander(params);
    try {
        jc.parse(args);
    } catch (ParameterException t) {
        usage(jc, t);
    }

    // Replace System.out with a file
    if (!StringUtils.isNullOrEmpty(params.dbPerformanceFile)) {
        out = new PrintStream(params.dbPerformanceFile);
        System.setErr(out);
    }

    deleteRecursively(new File("testdbs"));

    // Start the HSQL and H2 servers in-process
    org.hsqldb.Server hsql = startHSQL();
    org.h2.tools.Server h2 = startH2();

    // Statement logging
    final FileWriter statementWriter;
    if (StringUtils.isNullOrEmpty(params.sqlStatementsFile)) {
        statementWriter = null;
    } else {
        statementWriter = new FileWriter(params.sqlStatementsFile);
    }
    IciqlListener statementListener = new IciqlListener() {
        @Override
        public void logIciql(StatementType type, String statement) {
            if (statementWriter == null) {
                return;
            }
            try {
                statementWriter.append(statement);
                statementWriter.append('\n');
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    };
    IciqlLogger.registerListener(statementListener);

    SuiteClasses suiteClasses = IciqlSuite.class.getAnnotation(SuiteClasses.class);
    long quickestDatabase = Long.MAX_VALUE;
    String dividerMajor = buildDivider('*', 79);
    String dividerMinor = buildDivider('-', 79);

    // Header
    out.println(dividerMajor);
    out.println(MessageFormat.format("{0} {1} ({2}) testing {3} database configurations", Constants.NAME,
            Constants.VERSION, Constants.VERSION_DATE, TEST_DBS.length));
    out.println(dividerMajor);
    out.println();
    showProperty("java.vendor");
    showProperty("java.runtime.version");
    showProperty("java.vm.name");
    showProperty("os.name");
    showProperty("os.version");
    showProperty("os.arch");
    showProperty("available processors", "" + Runtime.getRuntime().availableProcessors());
    showProperty("available memory", MessageFormat.format("{0,number,0.0} GB",
            ((double) Runtime.getRuntime().maxMemory()) / (1024 * 1024)));
    out.println();

    // Test a database
    long lastCount = 0;
    for (TestDb testDb : TEST_DBS) {
        out.println(dividerMinor);
        out.println("Testing " + testDb.describeDatabase());
        out.println(" " + testDb.url);
        out.println(dividerMinor);

        // inject a database section delimiter in the statement log
        if (statementWriter != null) {
            statementWriter.append("\n\n");
            statementWriter.append("# ").append(dividerMinor).append('\n');
            statementWriter.append("# ").append("Testing " + testDb.describeDatabase()).append('\n');
            statementWriter.append("# ").append(dividerMinor).append('\n');
            statementWriter.append("\n\n");
        }

        if (testDb.getVersion().equals("OFFLINE")) {
            // Database not available
            out.println("Skipping. Could not find " + testDb.url);
            out.println();
        } else {
            // Setup system properties
            System.setProperty("iciql.url", testDb.url);
            System.setProperty("iciql.user", testDb.username);
            System.setProperty("iciql.password", testDb.password);

            // Test database
            Result result = JUnitCore.runClasses(suiteClasses.value());

            // Report results
            testDb.runtime = result.getRunTime();
            if (testDb.runtime < quickestDatabase) {
                quickestDatabase = testDb.runtime;
            }
            testDb.statements = IciqlLogger.getTotalCount() - lastCount;

            // reset total count for next database
            lastCount = IciqlLogger.getTotalCount();

            out.println(MessageFormat.format(
                    "{0} tests ({1} failures, {2} ignores) {3} statements in {4,number,0.000} secs",
                    result.getRunCount(), result.getFailureCount(), result.getIgnoreCount(),
                    testDb.statements, result.getRunTime() / 1000f));
            if (result.getFailureCount() == 0) {
                out.println();
                out.println(" 100% successful test suite run.");
                out.println();
            } else {
                for (Failure failure : result.getFailures()) {
                    out.println(MessageFormat.format("\n + {0}\n {1}", failure.getTestHeader(),
                            failure.getMessage()));
                }
                out.println();
            }
        }
    }

    // Display runtime results sorted by performance leader
    out.println();
    out.println(dividerMajor);
    out.println(MessageFormat.format("{0} {1} ({2}) test suite performance results", Constants.NAME,
            Constants.VERSION, Constants.VERSION_DATE));
    out.println(dividerMajor);

    List<TestDb> dbs = Arrays.asList(TEST_DBS);
    Collections.sort(dbs);

    out.println(MessageFormat.format("{0} {1} {2} {3} {4}", StringUtils.pad("Name", 11, " ", true),
            StringUtils.pad("Type", 5, " ", true), StringUtils.pad("Version", 23, " ", true),
            StringUtils.pad("Stats/Sec", 10, " ", true), "Runtime"));
    out.println(dividerMinor);
    for (TestDb testDb : dbs) {
        DecimalFormat df = new DecimalFormat("0.0");
        out.println(MessageFormat.format("{0} {1} {2} {3} {4} {5}s ({6,number,0.0}x)",
                StringUtils.pad(testDb.name, 11, " ", true), testDb.isEmbedded ? "E" : "T",
                testDb.isMemory ? "M" : "F", StringUtils.pad(testDb.getVersion(), 21, " ", true),
                StringUtils.pad("" + testDb.getStatementRate(), 10, " ", false),
                StringUtils.pad(df.format(testDb.getRuntime()), 8, " ", false),
                ((double) testDb.runtime) / quickestDatabase));
    }
    out.println(dividerMinor);
    out.println(" E = embedded connection");
    out.println(" T = tcp/ip connection");
    out.println(" M = memory database");
    out.println(" F = file/persistent database");

    // cleanup
    for (PoolableConnectionFactory factory : connectionFactories.values()) {
        factory.getPool().close();
    }
    IciqlLogger.unregisterListener(statementListener);
    out.close();
    System.setErr(ERR);
    if (statementWriter != null) {
        statementWriter.close();
    }
    hsql.stop();
    h2.stop();
    System.exit(0);
}
From source file:Main.java
static void setSetting(String name, String data) {
    try {
        File root = new File(Environment.getExternalStorageDirectory().toString(), ".Instagram");
        if (!root.exists()) {
            root.mkdirs();
        }
        File gpxfile = new File(root, name + ".txt");
        FileWriter writer = new FileWriter(gpxfile);
        writer.append(data);
        writer.flush();
        writer.close();
    } catch (IOException e) {
    }
}
From source file:Main.java
public static boolean writeLineToFile(File file, String line, boolean append) {
    try {
        FileWriter fileWriter = new FileWriter(file, append);
        fileWriter.append(line + "\r\n");
        fileWriter.flush();
        fileWriter.close();
        return true;
    } catch (IOException e) {
        e.printStackTrace();
        return false;
    }
}
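A possible call site for this helper; the path and line content are hypothetical and only illustrate the append flag:

    File log = new File("/tmp/app.log");
    boolean ok = writeLineToFile(log, "startup complete", true); // true appends, false overwrites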
From source file:JSONUtil.java
/**
 * @param out
 * @param file
 * @throws IOException
 * @throws JSONException
 */
public static void saveJson(JSONObject out, String file) throws IOException, JSONException {
    File fo = new File(file);
    FileWriter fw = new FileWriter(fo);
    fw.append(out.toString(4));
    fw.close();
}
From source file:cfd.backupper.state.StartupConfig.java
public static void putSetting(String key, List l) {
    // l needs to be toString(), otherwise there are no doublequotes in JSON.
    List stringedList = (List) l.stream().map(elem -> elem.toString()).collect(Collectors.toList());
    if (jo.containsKey(key)) {
        jo.replace(key, stringedList);
    } else {
        jo.put(key, stringedList);
    }
    try {
        FileWriter fw = new FileWriter(confFile, false);
        fw.append(jo.toJSONString());
        fw.flush();
        fw.close();
    } catch (IOException ex) {
        Logger.getLogger(StartupConfig.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:gov.nih.nci.firebird.commons.test.TestFileUtils.java
/**
 * Creates a temporary file which will be deleted upon JVM exit.
 *
 * @return temporary file
 * @throws IOException if there is a problem creating a temporary file
 */
public static File createTemporaryFile() throws IOException {
    File file = File.createTempFile("temp_", ".tmp");
    file.createNewFile();
    FileWriter fileWriter = new FileWriter(file);
    fileWriter.append(SimpleDateFormat.getDateTimeInstance().format(new Date()));
    fileWriter.flush();
    fileWriter.close();
    FileUtils.forceDeleteOnExit(file);
    return file;
}
From source file:com.amazonaws.util.FileUtils.java
/**
 * Appends the given data to the file specified in the input and returns the
 * reference to the file.
 *
 * @param file
 * @param dataToAppend
 * @return reference to the file.
 * @throws IOException
 */
public static File appendDataToTempFile(File file, String dataToAppend) throws IOException {
    FileWriter outputWriter = new FileWriter(file);
    try {
        outputWriter.append(dataToAppend);
    } finally {
        outputWriter.close();
    }
    return file;
}
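Note that new FileWriter(file) opens the file in overwrite mode, so append(dataToAppend) here writes into a truncated file; that is harmless for a freshly created temp file but it is not a true append to existing content. A sketch of an appending variant, given as an assumption for illustration rather than the AWS SDK source:

    public static File appendDataToTempFile(File file, String dataToAppend) throws IOException {
        // the second constructor argument 'true' opens the file in append mode
        FileWriter outputWriter = new FileWriter(file, true);
        try {
            outputWriter.append(dataToAppend);
        } finally {
            outputWriter.close();
        }
        return file;
    }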
From source file:com.dalthed.tucan.TucanMobile.java
/**
 * Writes a note on the SD. Only used for development purposes.
 *
 * @param sFileName filename
 * @param sBody content
 * @param mContext app context
 */
public static void generateNoteOnSD(String sFileName, String sBody, Context mContext) {
    try {
        File root = new File(Environment.getExternalStorageDirectory(), "Notes");
        if (!root.exists()) {
            root.mkdirs();
        }
        File gpxfile = new File(root, sFileName);
        FileWriter writer = new FileWriter(gpxfile);
        writer.append(sBody);
        writer.flush();
        writer.close();
        //Toast.makeText(mContext, "Saved", Toast.LENGTH_SHORT).show();
    } catch (IOException e) {
        e.printStackTrace();
        String importError = e.getMessage();
        Log.e("TuCanMobile", importError);
    }
}