List of usage examples for java.io.BufferedWriter.newLine()
public void newLine() throws IOException
Writes a line separator. The line separator string is defined by the system property line.separator, and is not necessarily a single newline ('\n') character.
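Before the real-world examples below, a minimal sketch of the typical pattern may be useful (the file name and content are placeholders, not taken from any of the source files): try-with-resources guarantees the writer is closed, and newLine() is preferred over a hard-coded '\n' because it emits the platform's separator.

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public class NewLineExample {
    public static void main(String[] args) throws IOException {
        // The writer is closed automatically, even if an exception is thrown.
        try (BufferedWriter writer = new BufferedWriter(new FileWriter("example.txt"))) {
            writer.write("first line");
            writer.newLine(); // platform line separator, not necessarily '\n'
            writer.write("second line");
            writer.newLine();
        }
    }
}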
From source file:gdsc.smlm.ij.plugins.TraceMolecules.java
private void saveTraceData(StoredDataStatistics s, String name, String fileSuffix) {
    BufferedWriter file = null;
    try {
        file = new BufferedWriter(
                new FileWriter(settings.traceDataDirectory + TITLE + "." + fileSuffix + ".txt"));
        file.append(name);
        file.newLine();
        for (double d : s.getValues()) {
            file.append(Utils.rounded(d, 4));
            file.newLine();
        }
    } catch (Exception e) {
        // Q. Add better handling of errors?
        e.printStackTrace();
        IJ.log("Failed to save trace data to results directory: " + settings.traceDataDirectory);
    } finally {
        if (file != null) {
            try {
                file.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
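The example above closes the writer manually in a finally block. On Java 7 and later, the same one-value-per-line pattern can be written more compactly with try-with-resources; this is only a sketch, and the method and parameter names are illustrative rather than taken from the plugin:

// Hypothetical variant of saveTraceData using try-with-resources.
private void saveValues(double[] values, String name, File outFile) {
    try (BufferedWriter file = new BufferedWriter(new FileWriter(outFile))) {
        file.append(name);
        file.newLine(); // header line, then one value per line
        for (double d : values) {
            file.append(Double.toString(d));
            file.newLine();
        }
    } catch (IOException e) {
        e.printStackTrace(); // close() is handled by try-with-resources
    }
}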
From source file:gov.nih.nci.ncicb.tcga.dcc.qclive.common.util.DownloadNCBITraceXML.java
public void saveXMLFiles(int p) {
    BufferedReader br = null;
    BufferedWriter bw = null;
    try {
        String pagesquery = "http://www.ncbi.nlm.nih.gov/Traces/trace.cgi?&cmd=retrieve&val=PROJECT_NAME%3D%22TCGA%22%20and%20LOAD_DATE%3E%3D%22"
                + latestLoaddate + "%22&size=" + querysize + "&dopt=xml_info&dispmax=" + pagesize + "&page=" + p;
        //String pagesquery = "http://www.ncbi.nlm.nih.gov/Traces/trace.cgi?&cmd=retrieve&val=project_name%3D%22TCGA%22%20and%20load_date%3E%3D%2204%2F09%2F2008%22&dopt=info&size=490552&dispmax=5&page=4&next=%3E%3E
        URL url = new URL(pagesquery);
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        //System.out.println( "url = " + url );
        is = connection.getInputStream();
        //noinspection IOResourceOpenedButNotSafelyClosed
        br = new BufferedReader(new InputStreamReader(is));
        String line;
        fw = new FileWriter(downloaddir + File.separator + xmlfiles + "." + p + ".xml");
        //fw= new FileWriter("C:\\larry\\WorkingDoc\\"+ latestLoaddate + "\\" + xmlfiles + "." + p + ".xml");
        //noinspection IOResourceOpenedButNotSafelyClosed
        bw = new BufferedWriter(fw);
        bw.write("<traces>");
        while ((line = br.readLine()) != null) {
            for (int i = 0; i < fieldsWeCareForNow.length; i++) {
                if (line.contains(fieldsWeCareForNow[i])) {
                    if (line.contains("<pre><trace>")) {
                        line = "<trace>";
                        //System.out.println(line);
                        bw.write(line);
                        bw.newLine();
                    } else {
                        //System.out.println(line);
                        bw.write(line);
                        bw.newLine();
                    }
                }
            }
        }
        bw.write("</traces>");
        //fw.close();
        is.close();
        connection.disconnect();
    } catch (MalformedURLException e) {
        getLogger().logToLogger(Level.ERROR, "DownloadNCBITraceXML saveXMLFiles MalformedURLException " + e.toString());
    } catch (IOException e) {
        getLogger().logToLogger(Level.ERROR, "DownloadNCBITraceXML saveXMLFiles IOException " + e.toString());
    } finally {
        IOUtils.closeQuietly(br);
        IOUtils.closeQuietly(bw);
    }
}
From source file:gr.seab.r2rml.beans.Generator.java
public void createTriples(MappingDocument mappingDocument) {
    verbose = properties.containsKey("default.verbose")
            && properties.getProperty("default.verbose").contains("true");
    storeOutputModelInTdb = properties.containsKey("jena.storeOutputModelUsingTdb")
            && properties.getProperty("jena.storeOutputModelUsingTdb").contains("true");
    incremental = !storeOutputModelInTdb && properties.containsKey("default.incremental")
            && properties.getProperty("default.incremental").contains("true");
    encodeURLs = properties.containsKey("jena.encodeURLs")
            && properties.getProperty("jena.encodeURLs").contains("true");
    writeReifiedModel = incremental;
    forceUri = properties.containsKey("default.forceURI")
            && properties.getProperty("default.forceURI").contains("true");

    String destinationFileName = properties.getProperty("jena.destinationFileName");
    int dot = destinationFileName.lastIndexOf('.') > -1 ? destinationFileName.lastIndexOf('.')
            : destinationFileName.length();
    String reifiedModelFileName = destinationFileName.substring(0, dot) + "-reified"
            + destinationFileName.substring(dot);

    logModel = ModelFactory.createDefaultModel();
    String logNs = properties.getProperty("default.namespace");
    logModel.setNsPrefix("log", logNs);

    if (incremental) {
        InputStream isMap = FileManager.get().open(reifiedModelFileName);
        resultModel = ModelFactory.createDefaultModel();
        try {
            resultModel.read(isMap, null, "N-TRIPLE");
        } catch (Exception e) {
            log.error(e.toString());
            log.error("Error reading last run model. Cannot proceed with incremental, going for a full run.");
            // Please change property default.incremental in file r2rml.properties to false.
            resultModel.setNsPrefixes(mappingDocument.getPrefixes());
            incremental = false;
            writeReifiedModel = true;
            //System.exit(0);
        }
        String logFilename = properties.getProperty("default.log");
        InputStream isMapLog = FileManager.get().open(logFilename);
        try {
            logModel.read(isMapLog, properties.getProperty("default.namespace"),
                    properties.getProperty("mapping.file.type"));
            if (incremental)
                log.info("Going to dump incrementally, based on log file " + properties.getProperty("default.log"));
        } catch (Exception e) {
            log.error(e.toString());
            log.error("Error reading log. Cannot proceed with incremental, going for a full run.");
            // Please change property default.incremental in file r2rml.properties to false.
            incremental = false;
            writeReifiedModel = true;
            //System.exit(0);
        }
        //remove any old statements. A Set instead of a List would disallow duplicates but perform worse
        ArrayList<Statement> statementsToRemove = new ArrayList<Statement>();
        StmtIterator stmtIter = resultModel.listStatements();
        while (stmtIter.hasNext()) {
            Statement stmt = stmtIter.next();
            Resource type = stmt.getSubject().getPropertyResourceValue(RDF.type);
            if (type == null || !type.equals(RDF.Statement)) {
                statementsToRemove.add(stmt);
            }
        }
        stmtIter.close();
        resultModel.remove(statementsToRemove); //.toArray(new Statement[statementsToRemove.size()]));
        log.info("Removed " + statementsToRemove.size() + " old statements.");
    }

    boolean executeAllMappings = false;
    int mappingsExecuted = 0;
    //if there are no reified statements in the result model, this means that last time incremental was set to false,
    //so we need to re-create the model's reified statements
    if (incremental) {
        try {
            RSIterator rsIter = resultModel.listReifiedStatements();
            if (!rsIter.hasNext()) {
                executeAllMappings = true;
            }
            rsIter.close();
        } catch (Exception e) {
            log.error(e.toString());
            log.error("Error trying to read destination file. Forcing full mapping.");
            executeAllMappings = true;
        }
        try {
            Resource r = logModel.getResource(logNs + "destinationFile");
            Statement stmt1 = r.getProperty(logModel.getProperty(logNs + "destinationFileSize"));
            Long fileSizeInLogFile = Long.valueOf(stmt1.getObject().toString());
            Long actualFileSize = new Long(new File(destinationFileName).length());
            if (fileSizeInLogFile.longValue() != actualFileSize.longValue()) {
                log.info("Destination file size was found " + actualFileSize + " bytes while it should be "
                        + fileSizeInLogFile + " bytes. Forcing full mapping.");
                executeAllMappings = true;
            }
            Statement stmt2 = r.getProperty(logModel.getProperty(logNs + "reifiedModelFileSize"));
            Long reifiedModelFileSizeInLogFile = Long.valueOf(stmt2.getObject().toString());
            Long actualReifiedModelFileSize = new Long(new File(reifiedModelFileName).length());
            if (reifiedModelFileSizeInLogFile.longValue() != actualReifiedModelFileSize.longValue()) {
                log.info("Destination reified model file size was found " + actualReifiedModelFileSize
                        + " bytes while it should be " + reifiedModelFileSizeInLogFile
                        + " bytes. Forcing full mapping.");
                executeAllMappings = true;
            }
        } catch (Exception e) {
            log.error(e.toString());
            log.error("Error trying to read log file. Forcing full mapping.");
            executeAllMappings = true;
        }
    }

    int iterCount = 0;
    for (LogicalTableMapping logicalTableMapping : mappingDocument.getLogicalTableMappings()) {
        boolean executeMapping = true;
        if (incremental) {
            HashMap<String, String> lastRunStatistics = new HashMap<String, String>();
            Resource lastRunLogicalTableMapping = logModel.getResource(logicalTableMapping.getUri());
            StmtIterator iter = lastRunLogicalTableMapping.listProperties();
            while (iter.hasNext()) {
                Statement stmt = iter.next();
                Property prop = stmt.getPredicate();
                RDFNode node = stmt.getObject();
                if (verbose)
                    log.info("Found in last time log " + prop.getLocalName() + " " + node.toString());
                lastRunStatistics.put(prop.getLocalName(), node.toString());
            }
            iter.close();
            //selectQueryHash logicalTableMappingHash selectQueryResultsHash tripleCount timestamp
            String selectQueryHash = util.md5(logicalTableMapping.getView().getSelectQuery().getQuery());
            String logicalTableMappingHash = util.md5(logicalTableMapping);
            java.sql.Statement st = db.newStatement();
            try {
                ResultSet rsSelectQueryResultsHash = st
                        .executeQuery(logicalTableMapping.getView().getSelectQuery().getQuery());
                String selectQueryResultsHash = util.md5(rsSelectQueryResultsHash);
                if (selectQueryHash.equals(lastRunStatistics.get("selectQueryHash"))
                        && logicalTableMappingHash.equals(lastRunStatistics.get("logicalTableMappingHash"))
                        && selectQueryResultsHash.equals(lastRunStatistics.get("selectQueryResultsHash"))) {
                    executeMapping = false || executeAllMappings;
                    if (verbose) {
                        if (!executeMapping) {
                            log.info("Will skip triple generation from " + logicalTableMapping.getUri()
                                    + ". Found the same (a) select query (b) logical table mapping and (c) select query results.");
                        }
                    }
                }
            } catch (SQLException sqle) {
                log.error("Failed to execute query: " + logicalTableMapping.getView().getSelectQuery().getQuery(), sqle);
            } finally {
                try {
                    st.close();
                } catch (SQLException e) {
                    /* ignore exception */
                }
            }
        }

        ArrayList<String> subjects = new ArrayList<String>();
        if (executeMapping) {
            mappingsExecuted++;
            if (incremental) {
                //Since we are executing the mapping again, we are removing old statements and their respective reifications
                ArrayList<ReifiedStatement> reificationsToRemove = new ArrayList<ReifiedStatement>();
                resultModel.listReifiedStatements();
                RSIterator rsExistingIter = resultModel.listReifiedStatements();
                while (rsExistingIter.hasNext()) {
                    ReifiedStatement rstmt = rsExistingIter.next();
                    Statement st = rstmt.getProperty(DC.source);
                    String source = st.getObject().toString();
                    if (mappingDocument.findLogicalTableMappingByUri(source) != null) {
                        if (logicalTableMapping.getUri().equals(source)) {
                            reificationsToRemove.add(rstmt);
                        }
                    } else {
                        reificationsToRemove.add(rstmt);
                    }
                }
                rsExistingIter.close();
                //Remove the reified statement itself, i.e. [] a rdf:Statement ; rdf:subject ... ; rdf:predicate ; rdf:object ... ;
                //but also remove the statements having this statement as a subject and dc:source as a property
                ArrayList<Statement> statementsToRemove = new ArrayList<Statement>();
                for (ReifiedStatement rstmt : reificationsToRemove) {
                    statementsToRemove.add(rstmt.getRequiredProperty(DC.source));
                    //Also remove the statement itself
                    statementsToRemove.add(rstmt.getStatement());
                }
                for (ReifiedStatement rstmt : reificationsToRemove) {
                    resultModel.removeReification(rstmt);
                }
                log.info("Removing " + statementsToRemove.size() + " old statements and "
                        + reificationsToRemove.size() + " old reified statements from source "
                        + logicalTableMapping.getUri() + ".");
                //log.info("statementsToRemove are " + statementsToRemove.size() + " statements.");
                resultModel.remove(statementsToRemove); //.toArray(new Statement[statementsToRemove.size()]));
            }

            //Then insert the newly generated ones
            SelectQuery selectQuery = logicalTableMapping.getView().getSelectQuery();
            java.sql.Statement sqlStmt = db.newStatement();
            try {
                ResultSet rs = sqlStmt.executeQuery(selectQuery.getQuery());
                if (verbose)
                    log.info("Iterating over " + selectQuery.getQuery());
                rs.beforeFirst();
                while (rs.next()) {
                    Template subjectTemplate = logicalTableMapping.getSubjectMap().getTemplate();
                    String resultSubject = (subjectTemplate != null)
                            ? util.fillTemplate(subjectTemplate, rs, encodeURLs) : null;
                    if (resultSubject != null) {
                        //if (StringUtils.isNotEmpty(logicalTableMapping.getSubjectMap().getClassUri())) {
                        if (logicalTableMapping.getSubjectMap().getClassUris() != null
                                && logicalTableMapping.getSubjectMap().getClassUris().size() > 0) {
                            for (String classUri : logicalTableMapping.getSubjectMap().getClassUris()) {
                                Resource s = null; //resultModel.createResource();
                                if (verbose)
                                    log.info("Subject termType: " + subjectTemplate.getTermType().toString());
                                //we cannot have a literal as a subject, it has to be an iri or a blank node
                                if (subjectTemplate.getTermType() == TermType.IRI
                                        || subjectTemplate.getTermType() == TermType.LITERAL) {
                                    s = resultModel.createResource(resultSubject);
                                } else if (subjectTemplate.getTermType() == TermType.BLANKNODE) {
                                    s = resultModel.createResource(AnonId.create(resultSubject));
                                    if (verbose)
                                        log.info("Created blank node subject with id " + s.getId());
                                } else {
                                    s = resultModel.createResource(resultSubject);
                                }
                                Property p = RDF.type;
                                Resource o = resultModel.createResource(classUri);
                                Statement st = resultModel.createStatement(s, p, o);
                                if (verbose)
                                    log.info("Adding triple: <" + s.getURI() + ">, <" + p.getURI() + ">, <" + o.getURI() + ">");
                                subjects.add(st.getSubject().getURI());
                                if (incremental || writeReifiedModel) {
                                    ReifiedStatement rst = resultModel.createReifiedStatement(st);
                                    rst.addProperty(DC.source, resultModel.createResource(logicalTableMapping.getUri()));
                                } else {
                                    resultModel.add(st);
                                }
                            }
                        }

                        //for (int i = 0; i < logicalTableMapping.getPredicateObjectMaps() resultPredicates.size(); i++) {
                        for (PredicateObjectMap predicateObjectMap : logicalTableMapping.getPredicateObjectMaps()) {
                            Resource s = null; //resultModel.createResource();
                            if (verbose)
                                log.info("Subject termType: " + subjectTemplate.getTermType().toString());
                            if (subjectTemplate.getTermType() == TermType.IRI
                                    || subjectTemplate.getTermType() == TermType.LITERAL) {
                                s = resultModel.createResource(resultSubject);
                            } else if (subjectTemplate.getTermType() == TermType.BLANKNODE) {
                                s = resultModel.createResource(AnonId.create(resultSubject));
                                if (verbose)
                                    log.info("Created blank node subject with id " + s.getId());
                            } else {
                                s = resultModel.createResource(resultSubject);
                            }

                            Template objectTemplate = predicateObjectMap.getObjectTemplate();
                            if (verbose) {
                                if (objectTemplate != null && objectTemplate.getTermType() != null) {
                                    log.info("Object type is " + objectTemplate.getTermType().toString());
                                } else {
                                    log.info("Object type is null");
                                }
                            }

                            for (String predicate : predicateObjectMap.getPredicates()) {
                                Property p = resultModel.createProperty(predicate);
                                if (objectTemplate != null && objectTemplate.getTermType() != TermType.AUTO) {
                                    //Literal o = resultModel.createLiteral(u.fillTemplate(predicateObjectMap.getObjectTemplate(), rs));
                                    //if (!util.isUriTemplate(resultModel, predicateObjectMap.getObjectTemplate())) {
                                    if (objectTemplate.getTermType() == TermType.LITERAL) {
                                        Literal o = null;
                                        if (predicateObjectMap.getObjectTemplate().getLanguage() == null
                                                || "".equals(predicateObjectMap.getObjectTemplate().getLanguage())) {
                                            String value = util.fillTemplate(objectTemplate, rs, encodeURLs);
                                            if (value != null) {
                                                if (predicateObjectMap.getDataType() != null) {
                                                    o = resultModel.createTypedLiteral(value, predicateObjectMap.getDataType());
                                                    if (verbose)
                                                        log.info("Adding typed literal triple: <" + s.getURI() + ">, <"
                                                                + p.getURI() + ">, \"" + o.getString() + "\"^^"
                                                                + predicateObjectMap.getDataType().getURI());
                                                } else {
                                                    o = resultModel.createLiteral(value);
                                                    if (verbose)
                                                        log.info("Adding literal triple: <" + s.getURI() + ">, <"
                                                                + p.getURI() + ">, \"" + o.getString() + "\"");
                                                }
                                            }
                                        } else {
                                            String language = predicateObjectMap.getObjectTemplate().getLanguage();
                                            String value = util.fillTemplate(objectTemplate, rs, encodeURLs);
                                            if (value != null) {
                                                o = resultModel.createLiteral(value, language);
                                                if (verbose)
                                                    log.info("Adding literal triple with language: <" + s.getURI()
                                                            + ">, <" + p.getURI() + ">, \"" + o.getString() + "\"@"
                                                            + o.getLanguage());
                                            }
                                        }
                                        if (o != null) {
                                            if (forceUri && o.getString().startsWith("http")) {
                                                if (verbose)
                                                    log.info("Changing literal to URI: <" + o.getString() + ">");
                                                RDFNode oToUri = resultModel.createResource(o.getString());
                                                Statement st = resultModel.createStatement(s, p, oToUri);
                                                subjects.add(st.getSubject().getURI());
                                                if (incremental || writeReifiedModel) {
                                                    ReifiedStatement rst = resultModel.createReifiedStatement(st);
                                                    rst.addProperty(DC.source, resultModel.createResource(logicalTableMapping.getUri()));
                                                } else {
                                                    resultModel.add(st);
                                                }
                                            } else {
                                                Statement st = resultModel.createStatement(s, p, o);
                                                subjects.add(st.getSubject().getURI());
                                                if (incremental || writeReifiedModel) {
                                                    ReifiedStatement rst = resultModel.createReifiedStatement(st);
                                                    rst.addProperty(DC.source, resultModel.createResource(logicalTableMapping.getUri()));
                                                } else {
                                                    resultModel.add(st);
                                                }
                                            }
                                        }
                                    } else if (objectTemplate.getTermType() == TermType.IRI) {
                                        if (verbose)
                                            log.info("Filling in IRI template " + objectTemplate.getText());
                                        String value = util.fillTemplate(objectTemplate, rs, encodeURLs);
                                        if (value != null) {
                                            RDFNode o = resultModel.createResource(value);
                                            if (verbose)
                                                log.info("Adding resource triple: <" + s.getURI() + ">, <" + p.getURI()
                                                        + ">, <" + o.asResource().getURI() + ">");
                                            Statement st = resultModel.createStatement(s, p, o);
                                            subjects.add(st.getSubject().getURI());
                                            if (incremental || writeReifiedModel) {
                                                ReifiedStatement rst = resultModel.createReifiedStatement(st);
                                                rst.addProperty(DC.source, resultModel.createResource(logicalTableMapping.getUri()));
                                            } else {
                                                resultModel.add(st);
                                            }
                                        }
                                    } else if (objectTemplate.getTermType() == TermType.BLANKNODE) {
                                        if (verbose)
                                            log.info("filling in blanknode template " + objectTemplate.getText());
                                        String value = util.fillTemplate(objectTemplate, rs, encodeURLs);
                                        if (value != null) {
                                            RDFNode o = resultModel.createResource(AnonId.create(value));
                                            if (verbose)
                                                log.info("Adding resource triple: <" + s.getURI() + ">, <" + p.getURI()
                                                        + ">, <" + o.asResource().getURI() + ">");
                                            Statement st = resultModel.createStatement(s, p, o);
                                            subjects.add(st.getSubject().getURI());
                                            if (incremental || writeReifiedModel) {
                                                ReifiedStatement rst = resultModel.createReifiedStatement(st);
                                                rst.addProperty(DC.source, resultModel.createResource(logicalTableMapping.getUri()));
                                            } else {
                                                resultModel.add(st);
                                            }
                                        }
                                    }
                                } else if (predicateObjectMap.getObjectColumn() != null) {
                                    String field = predicateObjectMap.getObjectColumn();
                                    if (field.startsWith("\"") && field.endsWith("\"")) {
                                        field = field.replaceAll("\"", "");
                                        //log.info("Cleaning. Field is now " + field);
                                    }
                                    String test = getStringValue(field, rs);
                                    BaseDatatype xsdDataType = findFieldDataType(field, rs);
                                    predicateObjectMap.setDataType(xsdDataType);
                                    if (test != null) {
                                        Literal o;
                                        if (predicateObjectMap.getObjectTemplate().getLanguage() == null
                                                || "".equals(predicateObjectMap.getObjectTemplate().getLanguage())) {
                                            if (predicateObjectMap.getDataType() != null) {
                                                o = resultModel.createTypedLiteral(test, predicateObjectMap.getDataType());
                                                if (verbose)
                                                    log.info("Adding typed literal triple: <" + s.getURI() + ">, <"
                                                            + p.getURI() + ">, \"" + o.getString() + "\"^^"
                                                            + predicateObjectMap.getDataType().getURI());
                                            } else {
                                                o = resultModel.createLiteral(test);
                                                if (verbose)
                                                    log.info("Adding literal triple: <" + s.getURI() + ">, <"
                                                            + p.getURI() + ">, \"" + o.getString() + "\"");
                                            }
                                        } else {
                                            String language = predicateObjectMap.getObjectTemplate().getLanguage();
                                            o = resultModel.createLiteral(test, language);
                                            if (verbose)
                                                log.info("Adding triple with language: <" + s.getURI() + ">, <"
                                                        + p.getURI() + ">, \"" + o.getString() + "\"@"
                                                        + predicateObjectMap.getObjectTemplate().getLanguage());
                                        }
                                        Statement st = resultModel.createStatement(s, p, o);
                                        subjects.add(st.getSubject().getURI());
                                        if (incremental || writeReifiedModel) {
                                            ReifiedStatement rst = resultModel.createReifiedStatement(st);
                                            rst.addProperty(DC.source, resultModel.createResource(logicalTableMapping.getUri()));
                                        } else {
                                            resultModel.add(st);
                                        }
                                    }
                                } else if (predicateObjectMap.getRefObjectMap() != null
                                        && predicateObjectMap.getRefObjectMap().getParentTriplesMapUri() != null) {
                                    if (predicateObjectMap.getRefObjectMap().getParent() != null
                                            && predicateObjectMap.getRefObjectMap().getChild() != null) {
                                        if (verbose)
                                            log.info("Object URIs will be the subjects of the referenced triples, created previously by the logical table mapping with the uri "
                                                    + predicateObjectMap.getRefObjectMap().getParentTriplesMapUri()
                                                    + " with a rr:joinCondition containing rr:child "
                                                    + predicateObjectMap.getRefObjectMap().getChild()
                                                    + " and rr:parent " + predicateObjectMap.getRefObjectMap().getParent());
                                        LogicalTableMapping l = mappingDocument.findLogicalTableMappingByUri(
                                                predicateObjectMap.getRefObjectMap().getParentTriplesMapUri());
                                        String childValue = rs.getString(predicateObjectMap.getRefObjectMap()
                                                .getChild().replaceAll("\"", "")); //table names need to be e.g. Sport instead of "Sport", and this is why we remove the quotes
                                        if (childValue != null && !StringUtils.isNumeric(childValue)) {
                                            childValue = "'" + childValue + "'";
                                        }
                                        if (verbose)
                                            log.info("child value is " + childValue);

                                        SelectQuery parentQuery;
                                        if (l.getSubjectMap().getSelectQuery() != null) {
                                            parentQuery = l.getSubjectMap().getSelectQuery();
                                        } else {
                                            parentQuery = l.getView().getSelectQuery(); //assure the select query is not null
                                        }
                                        String parentQueryText = parentQuery.getQuery();
                                        if (parentQuery.getTables().size() == 1) {
                                            String parentFieldName = predicateObjectMap.getRefObjectMap().getParent();
                                            if (mappingDocument.getDatabaseType() == DatabaseType.MYSQL)
                                                parentFieldName = parentFieldName.replaceAll("\"", ""); //in mysql, table names must not be enclosed in quotes
                                            boolean containsWhere = parentQueryText.toLowerCase().contains("where");
                                            String addition = (containsWhere ? " AND " : " WHERE ")
                                                    + parentFieldName + " = " + childValue;
                                            int order = parentQueryText.toUpperCase().indexOf("ORDER BY");
                                            if (order != -1) {
                                                String orderCondition = parentQueryText.substring(order);
                                                parentQueryText = parentQueryText.substring(0, order) + addition + " " + orderCondition;
                                            } else {
                                                parentQueryText += addition;
                                            }
                                        } else {
                                            log.error("In the logical table mapping <" + logicalTableMapping.getUri()
                                                    + ">, the SQL query that generates the parent triples in the parent logical table mapping <"
                                                    + l.getUri() + "> contains results from more than one tables. "
                                                    + " Consider using rr:tableName instead of rr:sqlQuery in the parent logical table mapping. Terminating.");
                                            System.exit(1);
                                        }
                                        if (verbose)
                                            log.info("Modified parent SQL query to " + parentQuery);
                                        java.sql.Statement parentSqlStmt = db.newStatement();
                                        ResultSet rsParent = parentSqlStmt.executeQuery(parentQueryText);
                                        rsParent.beforeFirst();
                                        while (rsParent.next()) {
                                            Template parentTemplate = l.getSubjectMap().getTemplate();
                                            String parentSubject = util.fillTemplate(parentTemplate, rsParent, encodeURLs);
                                            RDFNode o = resultModel.createResource(parentSubject);
                                            Statement st = resultModel.createStatement(s, p, o);
                                            if (verbose)
                                                log.info("Adding triple referring to a parent statement subject: <"
                                                        + s.getURI() + ">, <" + p.getURI() + ">, <" + o.asResource().getURI() + ">");
                                            subjects.add(st.getSubject().getURI());
                                            if (incremental || writeReifiedModel) {
                                                ReifiedStatement rst = resultModel.createReifiedStatement(st);
                                                rst.addProperty(DC.source, resultModel.createResource(logicalTableMapping.getUri()));
                                            } else {
                                                resultModel.add(st);
                                            }
                                        }
                                        rsParent.close();
                                        parentSqlStmt.close();
                                    } else {
                                        if (verbose)
                                            log.info("Object URIs will be the subjects of the referenced triples, created previously by the logical table mapping with the uri "
                                                    + predicateObjectMap.getRefObjectMap().getParentTriplesMapUri());
                                        LogicalTableMapping l = mappingDocument.findLogicalTableMappingByUri(
                                                predicateObjectMap.getRefObjectMap().getParentTriplesMapUri());
                                        if (verbose)
                                            log.info("The logical table mapping with the uri " + l.getUri()
                                                    + " has already generated " + l.getSubjects().size() + " triples.");
                                        for (String existingStatementSubject : l.getSubjects()) {
                                            String existingSubjectUri = existingStatementSubject;
                                            RDFNode o = resultModel.createResource(existingSubjectUri);
                                            Statement st = resultModel.createStatement(s, p, o);
                                            if (verbose)
                                                log.info("Adding triple referring to an existing statement subject: <"
                                                        + s.getURI() + ">, <" + p.getURI() + ">, <" + o.asResource().getURI() + ">");
                                            subjects.add(st.getSubject().getURI());
                                            if (incremental || writeReifiedModel) {
                                                ReifiedStatement rst = resultModel.createReifiedStatement(st);
                                                rst.addProperty(DC.source, resultModel.createResource(logicalTableMapping.getUri()));
                                            } else {
                                                resultModel.add(st);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    iterCount++;
                    if (iterCount % 10000 == 0) {
                        log.info("At " + iterCount);
                        //System.out.println("At " + iterCount);
                    }
                }
                rs.close();
                sqlStmt.close();
            } catch (SQLException e) {
                e.printStackTrace();
            } finally {
                try {
                    sqlStmt.close();
                } catch (Exception e) {
                }
            }
        } else {
            log.info("Skipping triple generation from " + logicalTableMapping.getUri() + ". Nothing changed here.");
        }

        logicalTableMapping.setSubjects(subjects);
        if (verbose)
            log.info("Generated " + subjects.size() + " statements from table mapping <" + logicalTableMapping.getUri() + ">");
    }

    mappingDocument.getTimestamps().add(Calendar.getInstance().getTimeInMillis()); //2 Generated jena model in memory
    log.info("Finished generating jena model in memory.");

    if (!incremental || mappingsExecuted > 0) {
        if (!storeOutputModelInTdb) {
            String destinationFileSyntax = properties.getProperty("jena.destinationFileSyntax");
            String showXmlDeclarationProperty = properties.getProperty("jena.showXmlDeclaration");
            boolean showXmlDeclaration = (destinationFileSyntax.equalsIgnoreCase("RDF/XML")
                    || destinationFileSyntax.equalsIgnoreCase("RDF/XML-ABBREV"))
                    && showXmlDeclarationProperty.equalsIgnoreCase("true");

            if ((!incremental && writeReifiedModel) || incremental) {
                log.info("Generating clean model.");
                Model cleanModel = ModelFactory.createDefaultModel();
                cleanModel.setNsPrefixes(resultModel.getNsPrefixMap());
                //ArrayList<Statement> cleanStatements = new ArrayList<Statement>();
                RSIterator rsIter = resultModel.listReifiedStatements();
                long addedStatements = 0;
                while (rsIter.hasNext()) {
                    ReifiedStatement rstmt = rsIter.next();
                    //Statement st = rstmt.getStatement();
                    cleanModel.add(rstmt.getStatement());
                    //cleanStatements.add(rstmt.getStatement());
                    addedStatements++;
                    if (verbose && addedStatements % 10000 == 0)
                        log.info("At " + addedStatements);
                }
                rsIter.close();
                //If no reified statements were found, try actual statements
                //if (!cleanModel.listStatements().hasNext()) {
                if (addedStatements == 0) {
                    log.info("No reified statements were found, business as usual.");
                    StmtIterator stmtIter = resultModel.listStatements();
                    while (stmtIter.hasNext()) {
                        Statement st = stmtIter.nextStatement();
                        //cleanStatements.add(st);
                        cleanModel.add(st);
                        addedStatements++;
                        if (verbose && addedStatements % 10000 == 0)
                            log.info("At " + addedStatements);
                    }
                    stmtIter.close();
                }
                //log.info("Adding " + cleanStatements.size() + " statements to clean model.");
                //cleanModel.add(cleanStatements);
                //cleanStatements.clear(); //free some memory
                log.info("Writing clean model to " + destinationFileName);
                //log.info("Clean model has " + cleanModel.listStatements().toList().size() + " statements.");
                //Could as well be an empty model, this is why we check if it actually has any triples
                //if (cleanModel.listStatements().hasNext()) {
                if (!cleanModel.isEmpty()) {
                    try {
                        Calendar c0 = Calendar.getInstance();
                        long t0 = c0.getTimeInMillis();
                        //Force showXmlDeclaration
                        BufferedWriter out = new BufferedWriter(new FileWriter(destinationFileName));
                        if (showXmlDeclaration) {
                            out.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>");
                            out.newLine();
                        }
                        cleanModel.write(out, destinationFileSyntax);
                        out.close();
                        Calendar c1 = Calendar.getInstance();
                        long t1 = c1.getTimeInMillis();
                        log.info("Writing clean model to disk took " + (t1 - t0) + " milliseconds.");
                    } catch (FileNotFoundException e) {
                        e.printStackTrace();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    log.info("Clean model has " + cleanModel.size() + " statements.");
                    cleanModel.close();
                } else {
                    log.info("Nothing to write.");
                }
                mappingDocument.getTimestamps().add(Calendar.getInstance().getTimeInMillis()); //3 Wrote clean model to disk.
                //log.info("3 Wrote clean model to disk.");
            } else {
                log.info("Full run: Writing model to " + destinationFileName + ". Model has "
                        + resultModel.size() + " statements.");
                try {
                    Calendar c0 = Calendar.getInstance();
                    long t0 = c0.getTimeInMillis();
                    BufferedWriter out = new BufferedWriter(new FileWriter(destinationFileName));
                    if (showXmlDeclaration) {
                        out.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>");
                        out.newLine();
                    }
                    resultModel.write(out, destinationFileSyntax);
                    out.close();
                    Calendar c1 = Calendar.getInstance();
                    long t1 = c1.getTimeInMillis();
                    log.info("Writing model to disk took " + (t1 - t0) + " milliseconds.");
                    mappingDocument.getTimestamps().add(Calendar.getInstance().getTimeInMillis()); //3 Wrote clean model to disk
                    //log.info("3 Wrote clean model to disk");
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                // StmtIterator stmtIter = resultModel.listStatements();
                // while (stmtIter.hasNext()) {
                //     Statement st = stmtIter.nextStatement();
                //     cleanModel.add(st);
                // }
                // stmtIter.close();
            }

            if (writeReifiedModel) {
                log.info("Writing reified model to " + reifiedModelFileName + ".");
                try {
                    Calendar c0 = Calendar.getInstance();
                    long t0 = c0.getTimeInMillis();
                    BufferedWriter out = new BufferedWriter(new FileWriter(reifiedModelFileName));
                    resultModel.write(out, "N-TRIPLE"); //properties.getProperty("jena.destinationFileSyntax"));
                    out.close();
                    Calendar c1 = Calendar.getInstance();
                    long t1 = c1.getTimeInMillis();
                    log.info("Writing reified model to disk took " + (t1 - t0) + " milliseconds.");
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                log.info("Reified model has " + resultModel.size() + " statements.");
            } else {
                log.info("Not Writing reified model.");
            }
        } else {
            log.info("Storing model to database. Model has " + resultModel.size() + " statements.");
            Calendar c0 = Calendar.getInstance();
            long t0 = c0.getTimeInMillis();
            //Sync start
            Dataset dataset = TDBFactory.createDataset(properties.getProperty("jena.tdb.directory"));
            dataset.begin(ReadWrite.WRITE);
            Model existingDbModel = dataset.getDefaultModel();
            log.info("Existing model has " + existingDbModel.size() + " statements.");
            List<Statement> statementsToRemove = new ArrayList<Statement>();
            List<Statement> statementsToAdd = new ArrayList<Statement>();
            //first clear the ones from the old model
            StmtIterator stmtExistingIter = existingDbModel.listStatements();
            while (stmtExistingIter.hasNext()) {
                Statement stmt = stmtExistingIter.nextStatement();
                if (!resultModel.contains(stmt)) {
                    statementsToRemove.add(stmt);
                }
            }
            stmtExistingIter.close();
            log.info("Will remove " + statementsToRemove.size() + " statements.");
            //then add the new ones
            Model differenceModel = resultModel.difference(existingDbModel);
            StmtIterator stmtDiffIter = differenceModel.listStatements();
            while (stmtDiffIter.hasNext()) {
                Statement stmt = stmtDiffIter.nextStatement();
                statementsToAdd.add(stmt);
            }
            stmtDiffIter.close();
            differenceModel.close();
            log.info("Will add " + statementsToAdd.size() + " statements.");
            existingDbModel.remove(statementsToRemove);
            existingDbModel.add(statementsToAdd);
            dataset.commit();
            dataset.end();
            //Sync end
            Calendar c1 = Calendar.getInstance();
            long t1 = c1.getTimeInMillis();
            log.info("Updating model in database took " + (t1 - t0) + " milliseconds.");
            mappingDocument.getTimestamps().add(Calendar.getInstance().getTimeInMillis()); //3 Wrote clean model to tdb.
            //log.info("3 Wrote clean model to tdb.");
        }
    } else {
        log.info("Skipping writing the output model. No changes detected.");
        mappingDocument.getTimestamps().add(Calendar.getInstance().getTimeInMillis()); //3 Finished writing output model. No changes detected.
        //log.info("3 Finished writing output model. No changes detected.");
    }
    resultModel.close();

    //log the results
    Calendar c0 = Calendar.getInstance();
    long t0 = c0.getTimeInMillis();
    try {
        String logFile = properties.getProperty("default.log");
        log.info("Logging results to " + new File(logFile).getAbsolutePath());
        //overwrite old values
        logModel = ModelFactory.createDefaultModel();
        logModel.setNsPrefix("log", logNs);
        if (verbose)
            log.info("Logging destination file size");
        Property pFileSize = logModel.createProperty(logNs + "destinationFileSize");
        long fileSize = new File(destinationFileName).length();
        Literal oFileSize = logModel.createLiteral(String.valueOf(fileSize));
        logModel.add(logModel.createResource(logNs + "destinationFile"), pFileSize, oFileSize);

        if (writeReifiedModel) {
            if (verbose)
                log.info("Logging reified model file size");
            Property pReifiedModelFileSize = logModel.createProperty(logNs + "reifiedModelFileSize");
            long reifiedModelfileSize = new File(reifiedModelFileName).length();
            Literal oReifiedModelFileSize = logModel.createLiteral(String.valueOf(reifiedModelfileSize));
            logModel.add(logModel.createResource(logNs + "destinationFile"), pReifiedModelFileSize, oReifiedModelFileSize);

            //run on the table mappings
            for (LogicalTableMapping logicalTableMapping : mappingDocument.getLogicalTableMappings()) {
                Resource s = logModel.createResource(logicalTableMapping.getUri());
                if (verbose)
                    log.info("Logging selectQueryHash");
                Property pSelectQueryHash = logModel.createProperty(logNs + "selectQueryHash");
                String selectQuery = logicalTableMapping.getView().getSelectQuery().getQuery();
                Literal oSelectQueryHash = logModel.createLiteral(String.valueOf(util.md5(selectQuery)));
                logModel.add(s, pSelectQueryHash, oSelectQueryHash);

                if (verbose)
                    log.info("Logging logicalTableMappingHash");
                Property pLogicalTableMappingHash = logModel.createProperty(logNs + "logicalTableMappingHash");
                String logicalTableMappingHash = util.md5(logicalTableMapping);
                Literal oLogicalTableMappingHash = logModel.createLiteral(String.valueOf(logicalTableMappingHash));
                logModel.add(s, pLogicalTableMappingHash, oLogicalTableMappingHash);

                if (verbose)
                    log.info("Logging selectQueryResultsHash");
                Property pSelectQueryResultsHash = logModel.createProperty(logNs + "selectQueryResultsHash");
                java.sql.Statement stmt = db.newStatement();
                try {
                    ResultSet rsSelectQueryResultsHash = stmt
                            .executeQuery(logicalTableMapping.getView().getSelectQuery().getQuery());
                    Literal oSelectQueryResultsHash = logModel
                            .createLiteral(String.valueOf(util.md5(rsSelectQueryResultsHash)));
                    logModel.add(s, pSelectQueryResultsHash, oSelectQueryResultsHash);
                } catch (SQLException e) {
                    log.error("Failed to execute query: " + logicalTableMapping.getView().getSelectQuery().getQuery(), e);
                } finally {
                    try {
                        stmt.close();
                    } catch (SQLException e) {
                    }
                }
                // if (verbose) log.info("Logging tripleCount");
                // Property pTripleCount = logModel.createProperty(logNs + "tripleCount");
                // Literal oTripleCount = logModel.createLiteral(String.valueOf(logicalTableMapping.getTriples().size()));
                // logModel.add(s, pTripleCount, oTripleCount);
            }
        }
        if (verbose)
            log.info("Logging timestamp");
        Property pTimestamp = logModel.createProperty(logNs + "timestamp");
        Literal oTimestamp = logModel.createLiteral(String.valueOf(new Date()));
        logModel.add(logModel.createResource(logNs + "destinationFile"), pTimestamp, oTimestamp);

        BufferedWriter out = new BufferedWriter(new FileWriter(properties.getProperty("default.log")));
        logModel.write(out, properties.getProperty("mapping.file.type"));
        out.close();
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    Calendar c1 = Calendar.getInstance();
    long t1 = c1.getTimeInMillis();
    log.info("Logging took " + (t1 - t0) + " milliseconds.");
    mappingDocument.getTimestamps().add(Calendar.getInstance().getTimeInMillis()); //4 Finished logging.
    //log.info("4 Finished logging.");
}
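Every newLine() call in the long example above serves the same small purpose: keeping the XML declaration on its own line before Jena serializes the model into the same writer. A stripped-down sketch of that pattern (it assumes a Jena Model named model, the usual java.io imports, and a placeholder file name):

// Minimal sketch of the declaration-then-Model.write() pattern used above.
BufferedWriter out = new BufferedWriter(new FileWriter("output.rdf"));
out.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>");
out.newLine(); // terminate the declaration before the serialized model follows
model.write(out, "RDF/XML");
out.close();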
From source file:gdsc.smlm.ij.plugins.pcpalm.PCPALMFitting.java
private void writeHeader(BufferedWriter output, String header, String value) throws IOException {
    output.write("#");
    output.write(header);
    output.write(" = ");
    output.write(value);
    output.newLine();
}
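A hypothetical call site (the header names and values here are made up): each call emits one '#key = value' comment line, terminated by the platform separator.

writeHeader(output, "width", "512");  // writes: #width = 512
writeHeader(output, "height", "256"); // writes: #height = 256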
From source file:de.dfki.km.perspecting.obie.experiments.PhraseExperiment.java
@Test
public void testDifferentPrefixLengths() {
    final String template = "SELECT * WHERE {?s ?p ?o}";
    try {
        URL url = new URL("http://en.wikipedia.org/wiki/Kaiserslautern");
        Document document = pipeline.createDocument(FileUtils.toFile(url), url.toURI(), MediaType.HTML,
                template, Language.EN);
        for (int step = 0; pipeline.hasNext(step) && step <= 5; step = pipeline.execute(step, document)) {
            System.out.println(step);
        }
        final BufferedWriter bw = new BufferedWriter(
                new FileWriter($SCOOBIE_HOME + "results/response_time_prefix_hashing.csv"));
        for (int SIZE = 1; SIZE < 11; SIZE++) {
            TreeSet<String> hist = new TreeSet<String>();
            int count = 0;
            for (TokenSequence<String> i : document.getNounPhrases()) {
                String[] words = i.toString().split("[\\s]+");
                for (String word : words) {
                    count++;
                    if (word.length() >= SIZE)
                        hist.add(word.substring(0, SIZE));
                    else
                        hist.add(word);
                }
            }
            StringBuilder query = new StringBuilder();
            query.append("SELECT count(*) FROM index_literals, symbols WHERE "
                    + "( symbols.object = index_literals.index AND substr(index_literals.literal,1," + SIZE
                    + ") IN (");
            for (String p : hist) {
                query.append("(?) , ");
            }
            query.setLength(query.length() - 3);
            query.append("))");
            System.out.println(query.toString());
            Connection c = pool.getConnection();
            PreparedStatement stmtGetDatatypePropertyValues = c.prepareStatement(query.toString());
            int paramIndex = 0;
            for (String p : hist) {
                stmtGetDatatypePropertyValues.setString(++paramIndex, p);
            }
            long start = System.currentTimeMillis();
            ResultSet rs = stmtGetDatatypePropertyValues.executeQuery();
            long end = System.currentTimeMillis();
            while (rs.next()) {
                bw.append(SIZE + "\t" + (end - start) + "\t" + rs.getInt(1));
                bw.newLine();
            }
            stmtGetDatatypePropertyValues.close();
            c.close();
        }
        bw.close();
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}
From source file:analytics.storage.store2csv.java
@Override
public void storeElementValueData(HashMap<String, Integer> data, String metricName, String dataProvider,
        String analysisType, String headerColumn, String element, Logger logger, int time) {
    // TODO Auto-generated method stub
    String sFileName = dataProvider + analysisType + ".csv";
    Properties props = new Properties();
    try {
        props.load(new FileInputStream("configure.properties"));
    } catch (FileNotFoundException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
        System.exit(-1);
    } catch (IOException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
        System.exit(-1);
    }
    File anls = new File(props.getProperty(AnalyticsConstants.resultsPath) + "Analysis_Results");
    if (!anls.exists())
        anls.mkdir();
    else {
        // if (temporal == false) {
        //     FileUtils.deleteQuietly(anls);
        //     anls.mkdir();
        // }
    }
    File dir = new File(anls, dataProvider);
    if (!dir.exists())
        dir.mkdir();
    File file = new File(dir, sFileName);
    FileWriter writer;
    BufferedWriter bw = null;
    BufferedReader reader = null;
    try {
        if (file.exists() && time == 0)
            file.delete();
        // if (!file.exists() && time == 0) {
        writer = new FileWriter(file);
        bw = new BufferedWriter(writer);
        createHeaders(bw, metricName, headerColumn);
        Set<String> keySet = data.keySet();
        Iterator<String> iterator = keySet.iterator();
        StringBuffer logString = new StringBuffer();
        StringBuffer key = new StringBuffer();
        while (iterator.hasNext()) {
            // String key = iterator.next();
            key.append(iterator.next());
            Integer value = data.get(key.toString());
            if (key.toString().contains(","))
                key.replace(0, key.length(), key.toString().replace(",", "/"));
            // key = key.toString().replace(",", "/");
            // bw.append(element);
            // bw.append(',');
            bw.append(key);
            logString.append(dataProvider);
            logString.append(" " + element);
            logString.append(" " + key.toString().replace(" ", "_"));
            // logString.append(" " + key.replace(" ", "_"));
            bw.append(',');
            bw.append(String.valueOf(value));
            logString.append(" " + String.valueOf(value));
            bw.newLine();
            logger.info(logString.toString());
            logString.delete(0, logString.capacity());
            key.delete(0, key.length());
        }
        bw.close();
        // } else if (file.exists() && time == 0) {
        //     file.delete();
        //     writer = new FileWriter(file);
        //     bw = new BufferedWriter(writer);
        //     createHeaders(bw, metricName, headerColumn);
        //     Set<String> keySet = data.keySet();
        //     Iterator<String> iterator = keySet.iterator();
        //     StringBuffer logString = new StringBuffer();
        //     StringBuffer key = new StringBuffer();
        //     while (iterator.hasNext()) {
        //         // String key = iterator.next();
        //         key.append(iterator.next());
        //         Integer value = data.get(key.toString());
        //         if (key.toString().contains(","))
        //             key.replace(0, key.length(), key.toString().replace(",", "/"));
        //         // key = key.toString().replace(",", "/");
        //         // bw.append(element);
        //         // bw.append(',');
        //         bw.append(key);
        //         logString.append(dataProvider);
        //         logString.append(" " + element);
        //         logString.append(" " + key.toString().replace(" ", "_"));
        //         // logString.append(" " + key.replace(" ", "_"));
        //         bw.append(',');
        //         bw.append(String.valueOf(value));
        //         logString.append(" " + String.valueOf(value));
        //         bw.newLine();
        //         logger.info(logString.toString());
        //         logString.delete(0, logString.capacity());
        //         key.delete(0, key.length());
        //     }
        //     bw.close();
        // } else if (file.exists() && time > 0) {
        //     reader = new BufferedReader(new FileReader(file));
        //     File temp = new File(dir, "temp.csv");
        //     writer = new FileWriter(temp);
        //     bw = new BufferedWriter(writer);
        //     String line;
        //     int counter = 0;
        //     // Set<String> keySet = data.keySet();
        //     // Iterator<String> iterator = keySet.iterator();
        //     StringBuffer logString = new StringBuffer();
        //     StringBuffer key = new StringBuffer();
        //     while ((line = reader.readLine()) != null) {
        //         String[] split = line.split(",");
        //         // System.out.println(line);
        //         if (counter == 0) {
        //             line = line + "," + metricName;
        //             bw.append(line);
        //             bw.newLine();
        //         } else {
        //             // String key = iterator.next();
        //             // String key = split[0];
        //             key.append(split[0]);
        //             Integer value = data.get(key);
        //             // if (key.contains(","))
        //             //     key = key.replace(",", "/");
        //             if (key.toString().contains(","))
        //                 key.replace(0, key.length(), key.toString().replace(",", "/"));
        //             line = line + "," + value;
        //             bw.append(line);
        //             logString.append(dataProvider);
        //             logString.append(" " + element);
        //             logString.append(" " + key.toString().replace(" ", "_"));
        //             logString.append(" " + value);
        //             bw.newLine();
        //             logger.info(logString.toString());
        //             logString.delete(0, logString.capacity());
        //             key.delete(0, key.length());
        //         }
        //         counter += 1;
        //     }
        //     bw.close();
        //     FileUtils.copyFile(temp, file);
        //     temp.delete();
        //     reader.close();
        // }
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } finally {
        try {
            if (bw != null)
                bw.close();
            if (reader != null)
                reader.close();
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    }
}
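Stripped of the commented-out variants, the live path above reduces to a small loop: one 'key,value' row per map entry, each terminated by newLine(). A minimal sketch under that reading (the method name is illustrative; imports of java.io.* and java.util.Map are assumed):

// Hypothetical reduction of the CSV-writing loop above.
private void writeCsvRows(Map<String, Integer> data, File file) throws IOException {
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(file))) {
        for (Map.Entry<String, Integer> entry : data.entrySet()) {
            bw.append(entry.getKey().replace(",", "/")); // keep commas out of the key column
            bw.append(',');
            bw.append(String.valueOf(entry.getValue()));
            bw.newLine();
        }
    }
}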
From source file:ffx.xray.Looptimizer.java
@Override
public double energyAndGradient(double[] x, double[] gradient) {
    double e = potential.energyAndGradient(x, gradient);

    /**
     * OSRW is propagated with the slowly varying terms.
     */
    if (state == STATE.FAST) {
        return e;
    }

    if (osrwOptimization && lambda > osrwOptimizationLambdaCutoff) {
        if (energyCount % osrwOptimizationFrequency == 0) {
            logger.info(String.format(" OSRW Minimization (Step %d)", energyCount));

            // Set Lambda value to 1.0.
            lambdaInterface.setLambda(1.0);
            potential.setEnergyTermState(STATE.BOTH);
            RefinementMinimize refinementMinimize = null;
            Minimize minimize = null;
            double xStart[] = null;
            double xFinal[] = null;

            // Optimize the system.
            if (useXRayMinimizer) {
                refinementMinimize = new RefinementMinimize(diffractionData);
                int n = refinementMinimize.refinementEnergy.getNumberOfVariables();
                xStart = new double[n];
                xStart = refinementMinimize.refinementEnergy.getCoordinates(xStart);
                refinementMinimize.minimize(osrwOptimizationEps);
                xFinal = new double[n];
                xFinal = refinementMinimize.refinementEnergy.getCoordinates(xFinal);
            } else {
                minimize = new Minimize(null, potential, null);
                int n = potential.getNumberOfVariables();
                xStart = new double[n];
                xStart = potential.getCoordinates(xStart);
                minimize.minimize(osrwOptimizationEps);
                xFinal = new double[n];
                xFinal = potential.getCoordinates(xFinal);
            }

            double minValue;
            if (useXRayMinimizer) {
                // Collect the minimum R value.
                minValue = diffractionData.getRCrystalStat();
            } else {
                // Collect the minimum energy.
                minValue = potential.getTotalEnergy();
            }

            // If a new minimum has been found, save its coordinates.
            if (minValue < osrwOptimum) {
                osrwOptimum = minValue;
                if (useXRayMinimizer) {
                    logger.info(String.format(" New minimum R found: %16.8f (Step %d).", osrwOptimum, energyCount));
                } else {
                    logger.info(String.format(" New minimum energy found: %16.8f (Step %d).", osrwOptimum, energyCount));
                }
                osrwOptimumCoords = xFinal;
                if (pdbFilter.writeFile(pdbFile, false)) {
                    logger.info(String.format(" Wrote PDB file to " + pdbFile.getName()));
                }
            }

            /**
             * Reset coordinates for X-ray minimization (parameters may
             * include B-Factors).
             */
            if (useXRayMinimizer) {
                refinementMinimize.refinementEnergy.energy(xStart);
            }

            /**
             * Revert to the coordinates, gradient lambda, and RESPA State
             * prior to optimization.
             */
            potential.setScaling(null);
            lambdaInterface.setLambda(lambda);
            potential.setEnergyTermState(state);
            double eCheck = potential.energyAndGradient(x, gradient);
            if (abs(eCheck - e) > osrwOptimizationTolerance) {
                logger.warning(String.format(
                        " OSRW optimization could not revert coordinates %16.8f vs. %16.8f.", e, eCheck));
            }
        }
    }

    double biasEnergy = 0.0;
    dEdLambda = lambdaInterface.getdEdL();
    d2EdLambda2 = lambdaInterface.getd2EdL2();
    int lambdaBin = binForLambda(lambda);
    int FLambdaBin = binForFLambda(dEdLambda);
    double dEdU = dEdLambda;

    if (propagateLambda) {
        energyCount++;
    }

    /**
     * Calculate recursion kernel G(L, F_L) and its derivatives with respect
     * to L and F_L.
     */
    double dGdLambda = 0.0;
    double dGdFLambda = 0.0;
    double ls2 = (2.0 * dL) * (2.0 * dL);
    double FLs2 = (2.0 * dFL) * (2.0 * dFL);
    for (int iL = -biasCutoff; iL <= biasCutoff; iL++) {
        int lcenter = lambdaBin + iL;
        double deltaL = lambda - (lcenter * dL);
        double deltaL2 = deltaL * deltaL;
        // Mirror conditions for recursion kernel counts.
        int lcount = lcenter;
        double mirrorFactor = 1.0;
        if (lcount == 0 || lcount == lambdaBins - 1) {
            mirrorFactor = 2.0;
        } else if (lcount < 0) {
            lcount = -lcount;
        } else if (lcount > lambdaBins - 1) {
            // Number of bins past the last bin
            lcount -= (lambdaBins - 1);
            // Mirror bin
            lcount = lambdaBins - 1 - lcount;
        }
        for (int iFL = -biasCutoff; iFL <= biasCutoff; iFL++) {
            int FLcenter = FLambdaBin + iFL;
            /**
             * If either of the following FL edge conditions are true, then
             * there are no counts and we continue.
             */
            if (FLcenter < 0 || FLcenter >= FLambdaBins) {
                continue;
            }
            double deltaFL = dEdLambda - (minFLambda + FLcenter * dFL + dFL_2);
            double deltaFL2 = deltaFL * deltaFL;
            double weight = mirrorFactor * recursionKernel[lcount][FLcenter];
            double bias = weight * biasMag * exp(-deltaL2 / (2.0 * ls2)) * exp(-deltaFL2 / (2.0 * FLs2));
            biasEnergy += bias;
            dGdLambda -= deltaL / ls2 * bias;
            dGdFLambda -= deltaFL / FLs2 * bias;
        }
    }

    /**
     * Lambda gradient due to recursion kernel G(L, F_L).
     */
    dEdLambda += dGdLambda + dGdFLambda * d2EdLambda2;

    /**
     * Cartesian coordinate gradient due to recursion kernel G(L, F_L).
     */
    fill(dUdXdL, 0.0);
    lambdaInterface.getdEdXdL(dUdXdL);
    for (int i = 0; i < nVariables; i++) {
        gradient[i] += dGdFLambda * dUdXdL[i];
    }

    if (propagateLambda && energyCount > 0) {
        /**
         * Update free energy F(L) every ~10 steps.
         */
        if (energyCount % 10 == 0) {
            fLambdaUpdates++;
            boolean printFLambda = fLambdaUpdates % fLambdaPrintInterval == 0;
            totalFreeEnergy = updateFLambda(printFLambda);
            /**
             * Calculating Moving Average & Standard Deviation
             */
            totalAverage += totalFreeEnergy;
            totalSquare += Math.pow(totalFreeEnergy, 2);
            periodCount++;
            if (periodCount == window - 1) {
                double average = totalAverage / window;
                double stdev = Math.sqrt((totalSquare - Math.pow(totalAverage, 2) / window) / window);
                logger.info(String.format(
                        " The running average is %12.4f kcal/mol and the stdev is %8.4f kcal/mol.",
                        average, stdev));
                totalAverage = 0;
                totalSquare = 0;
                periodCount = 0;
            }
        }
        if (energyCount % saveFrequency == 0) {
            if (algorithmListener != null) {
                algorithmListener.algorithmUpdate(lambdaOneAssembly);
            }
            /**
             * Only the rank 0 process writes the histogram restart file.
             */
            if (rank == 0) {
                try {
                    OSRWHistogramWriter osrwHistogramRestart = new OSRWHistogramWriter(
                            new BufferedWriter(new FileWriter(histogramFile)));
                    osrwHistogramRestart.writeHistogramFile();
                    osrwHistogramRestart.flush();
                    osrwHistogramRestart.close();
                    logger.info(String.format(" Wrote OSRW histogram restart file to %s.", histogramFile.getName()));
                } catch (IOException ex) {
                    String message = " Exception writing OSRW histogram restart file.";
                    logger.log(Level.INFO, message, ex);
                }
            }
            /**
             * All ranks write a lambda restart file.
             */
            try {
                OSRWLambdaWriter osrwLambdaRestart = new OSRWLambdaWriter(
                        new BufferedWriter(new FileWriter(lambdaFile)));
                osrwLambdaRestart.writeLambdaFile();
                osrwLambdaRestart.flush();
                osrwLambdaRestart.close();
                logger.info(String.format(" Wrote OSRW lambda restart file to %s.", lambdaFile.getName()));
            } catch (IOException ex) {
                String message = " Exception writing OSRW lambda restart file.";
                logger.log(Level.INFO, message, ex);
            }
        }
        /**
         * Write out snapshot upon each full lambda traversal.
         */
        if (writeTraversalSnapshots) {
            double heldTraversalLambda = 0.5;
            if (!traversalInHand.isEmpty()) {
                heldTraversalLambda = Double.parseDouble(traversalInHand.get(0).split(",")[0]);
                if ((lambda > 0.2 && traversalSnapshotTarget == 0)
                        || (lambda < 0.8 && traversalSnapshotTarget == 1)) {
                    int snapshotCounts = Integer.parseInt(traversalInHand.get(0).split(",")[1]);
                    traversalInHand.remove(0);
                    File fileToWrite;
                    int numStructures;
                    if (traversalSnapshotTarget == 0) {
                        fileToWrite = lambdaZeroFile;
                        numStructures = ++lambdaZeroStructures;
                    } else {
                        fileToWrite = lambdaOneFile;
                        numStructures = ++lambdaOneStructures;
                    }
                    try {
                        FileWriter fw = new FileWriter(fileToWrite, true);
                        BufferedWriter bw = new BufferedWriter(fw);
                        bw.write(String.format("MODEL %d L=%.4f counts=%d", numStructures,
                                heldTraversalLambda, snapshotCounts));
                        for (int i = 0; i < 50; i++) {
                            bw.write(" ");
                        }
                        bw.newLine();
                        for (int i = 0; i < traversalInHand.size(); i++) {
                            bw.write(traversalInHand.get(i));
                            bw.newLine();
                        }
                        bw.write(String.format("ENDMDL"));
                        for (int i = 0; i < 75; i++) {
                            bw.write(" ");
                        }
                        bw.newLine();
                        bw.close();
                        logger.info(String.format(" Wrote traversal structure L=%.4f", heldTraversalLambda));
                    } catch (Exception exception) {
                        logger.warning(String.format("Exception writing to file: %s", fileToWrite.getName()));
                    }
                    heldTraversalLambda = 0.5;
                    traversalInHand.clear();
                    traversalSnapshotTarget = 1 - traversalSnapshotTarget;
                }
            }
            if (((lambda < 0.1 && traversalInHand.isEmpty())
                    || (lambda < heldTraversalLambda - 0.025 && !traversalInHand.isEmpty()))
                    && (traversalSnapshotTarget == 0 || traversalSnapshotTarget == -1)) {
                if (lambdaZeroFilter == null) {
                    lambdaZeroFilter = new PDBFilter(lambdaZeroFile, lambdaZeroAssembly, null, null);
                    lambdaZeroFilter.setListMode(true);
                }
                lambdaZeroFilter.clearListOutput();
                lambdaZeroFilter.writeFileWithHeader(lambdaFile,
                        new StringBuilder(String.format("%.4f,%d,", lambda, totalCounts)));
                traversalInHand = lambdaZeroFilter.getListOutput();
                traversalSnapshotTarget = 0;
            } else if (((lambda > 0.9 && traversalInHand.isEmpty())
                    || (lambda > heldTraversalLambda + 0.025 && !traversalInHand.isEmpty()))
                    && (traversalSnapshotTarget == 1 || traversalSnapshotTarget == -1)) {
                if (lambdaOneFilter == null) {
                    lambdaOneFilter = new PDBFilter(lambdaOneFile, lambdaOneAssembly, null, null);
                    lambdaOneFilter.setListMode(true);
                }
                lambdaOneFilter.clearListOutput();
                lambdaOneFilter.writeFileWithHeader(lambdaFile,
                        new StringBuilder(String.format("%.4f,%d,", lambda, totalCounts)));
                traversalInHand = lambdaOneFilter.getListOutput();
                traversalSnapshotTarget = 1;
            }
        }
    }

    /**
     * Compute the energy and gradient for the recursion slave at F(L) using
     * interpolation.
     */
    double freeEnergy = currentFreeEnergy();
    biasEnergy += freeEnergy;

    if (print) {
        logger.info(String.format(" %s %16.8f", "Bias Energy ", biasEnergy));
        logger.info(String.format(" %s %16.8f %s", "OSRW Potential ", e + biasEnergy, "(Kcal/mole)"));
    }

    if (propagateLambda && energyCount > 0) {
        /**
         * Log the current Lambda state.
         */
        if (energyCount % printFrequency == 0) {
            if (lambdaBins < 1000) {
                logger.info(String.format(" L=%6.4f (%3d) F_LU=%10.4f F_LB=%10.4f F_L=%10.4f", lambda,
                        lambdaBin, dEdU, dEdLambda - dEdU, dEdLambda));
            } else {
                logger.info(String.format(" L=%6.4f (%4d) F_LU=%10.4f F_LB=%10.4f F_L=%10.4f", lambda,
                        lambdaBin, dEdU, dEdLambda - dEdU, dEdLambda));
            }
        }
        /**
         * Metadynamics grid counts (every 'countInterval' steps).
         */
        if (energyCount % countInterval == 0) {
            if (jobBackend != null) {
                if (world.size() > 1) {
                    jobBackend.setComment(String.format(
                            "Overall dG=%10.4f at %7.3e psec, Current: [L=%6.4f, F_L=%10.4f, dG=%10.4f] at %7.3e psec",
                            totalFreeEnergy, totalCounts * dt * countInterval, lambda, dEdU, -freeEnergy,
                            energyCount * dt));
                } else {
                    jobBackend.setComment(String.format(
                            "Overall dG=%10.4f at %7.3e psec, Current: [L=%6.4f, F_L=%10.4f, dG=%10.4f]",
                            totalFreeEnergy, totalCounts * dt * countInterval, lambda, dEdU, -freeEnergy));
                }
            }
            if (asynchronous) {
                asynchronousSend(lambda, dEdU);
            } else {
                synchronousSend(lambda, dEdU);
            }
        }
    }

    /**
     * Propagate the Lambda particle.
     */
    if (propagateLambda) {
        langevin();
    } else {
        equilibrationCounts++;
        if (jobBackend != null) {
            jobBackend.setComment(String.format("Equilibration [L=%6.4f, F_L=%10.4f]", lambda, dEdU));
        }
        if (equilibrationCounts % 10 == 0) {
            logger.info(String.format(" L=%6.4f, F_L=%10.4f", lambda, dEdU));
        }
    }

    totalEnergy = e + biasEnergy;
    return totalEnergy;
}
From source file:de.dfki.km.perspecting.obie.corpus.TextCorpus.java
public LabeledTextCorpus labelRDFTypes(final File corpus, final Pipeline pipeline, final String template)
        throws Exception {
    final BufferedWriter writer = new BufferedWriter(new FileWriter(corpus));
    this.forEach(new DocumentProcedure<String>() {
        @Override
        public String process(Reader doc, URI uri) throws Exception {
            Document document = pipeline.createDocument(doc, uri, corpusMediaType, template, language);
            for (int step = 0; pipeline.hasNext(step); step = pipeline.execute(step, document))
                ;
            TIntHashSet sentenceBoundaries = new TIntHashSet();
            for (TokenSequence<Integer> sentence : document.getSentences()) {
                sentenceBoundaries.add(sentence.getEnd());
            }
            for (Token token : document) {
                String word = token.toString();
                String pos = token.getPartOfSpeechTag();
                String phrase = token.getNounPhraseTag();
                int label = -1;
                int[] types = token.getTypes(0.0).toArray();
                if (types.length > 0) {
                    label = pipeline.getKnowledgeBase().getCluster(types);
                    // System.out.println(word + " " + kb.getURI(label));
                }
                // int[] subjects = token.getSubjects().toArray();
                // if (subjects.length > 0) {
                //     System.out.println(word + " " + Arrays.toString(subjects));
                // }
                writer.append(word);
                writer.append(SPACE);
                writer.append(pos);
                writer.append(SPACE);
                writer.append(phrase);
                writer.append(SPACE);
                if (label > 0) {
                    writer.append(Integer.toString(label));
                } else {
                    writer.append(LabeledTextCorpus.OUTSIDE_ANY_LABEL);
                }
                writer.newLine();
                if (sentenceBoundaries.contains(token.getEnd())) {
                    writer.newLine();
                }
            }
            writer.flush();
            return uri.toString();
        }
    });
    writer.close();
    return new LabeledTextCorpus(corpus, MediaType.TEXT, this);
}
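The two newLine() calls above implement a CoNLL-style convention: the first ends each token row, and the second, bare call emits the blank line that separates sentences. A minimal sketch of just that convention (the String[][] input shape is an assumption for illustration):

// One row per token; a bare newLine() marks each sentence boundary.
for (String[] sentence : sentences) {
    for (String token : sentence) {
        writer.append(token);
        writer.newLine();
    }
    writer.newLine(); // blank line = end of sentence
}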
From source file:com.zimbra.cs.util.ProxyConfOverride.java
/**
 * Read from a cache holding the template file's content and translate it to
 * conf.
 */
private static void expandTempateFromCache(List<String> cache, BufferedWriter conf) throws IOException {
    for (String line : cache) {
        line = StringUtil.fillTemplate(line, mVars);
        conf.write(line);
        conf.newLine();
    }
}
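A hypothetical caller from within the same class (the file names are placeholders): read the template once into the cache, then expand it into the destination config file.

// Assumed usage: cache holds template lines already read from disk.
List<String> cache = Files.readAllLines(Paths.get("nginx.conf.template"));
try (BufferedWriter conf = new BufferedWriter(new FileWriter("nginx.conf"))) {
    expandTempateFromCache(cache, conf); // writes one expanded line per template line
}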
From source file:gui.GW2EventerGui.java
private void checkIniDir() {
    String path = System.getProperty("user.home") + "\\.gw2eventer";
    File f;
    InputStream in;
    Reader reader;
    f = new File(path);
    if (!f.exists() && !f.isDirectory()) {
        f.mkdirs();
    }
    f = new File(path + "\\tts.bat");
    if (!f.exists() && !f.isDirectory()) {
        Writer writer = null;
        BufferedWriter fout = null;
        try {
            writer = new OutputStreamWriter(new FileOutputStream(path + "\\tts.bat"), "ISO-8859-15");
            fout = new BufferedWriter(writer);
            fout.write("@echo off");
            fout.newLine();
            fout.write("%HOMEPATH%\\.gw2eventer\\tts.vbs");
            fout.newLine();
            fout.write("if exist %HOMEPATH%\\.gw2eventer\\tts.vbs del %HOMEPATH%\\.gw2eventer\\tts.vbs");
        } catch (UnsupportedEncodingException ex) {
            Logger.getLogger(GW2EventerGui.class.getName()).log(Level.SEVERE, null, ex);
        } catch (FileNotFoundException ex) {
            Logger.getLogger(GW2EventerGui.class.getName()).log(Level.SEVERE, null, ex);
        } catch (IOException ex) {
            Logger.getLogger(GW2EventerGui.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            try {
                // Null checks guard against an NPE when the writer could not be opened.
                if (fout != null)
                    fout.close();
                if (writer != null)
                    writer.close();
            } catch (IOException ex) {
                Logger.getLogger(GW2EventerGui.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }
}