List of usage examples for java.io.File.getName()
public String getName()
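Before the full examples, here is a minimal sketch (not taken from any of the sources below; the path is hypothetical) showing what getName() returns compared with the related path accessors:

import java.io.File;

public class GetNameDemo {
    public static void main(String[] args) {
        // Hypothetical path, used only for illustration
        File f = new File("/tmp/reports/summary-2024.csv");

        System.out.println(f.getName());   // summary-2024.csv              (last segment only)
        System.out.println(f.getPath());   // /tmp/reports/summary-2024.csv (path as constructed)
        System.out.println(f.getParent()); // /tmp/reports                  (everything before the name)
    }
}

getName() only inspects the abstract pathname, so it behaves the same whether or not the file actually exists.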
From source file:edu.isi.karma.research.modeling.ModelLearner_LOD.java
public static void main(String[] args) throws Exception {
    ServletContextParameterMap contextParameters = ContextParametersRegistry.getInstance().getDefault();
    contextParameters.setParameterValue(ContextParameter.USER_CONFIG_DIRECTORY, "/Users/mohsen/karma/config");

    OntologyManager ontologyManager = new OntologyManager(contextParameters.getId());
    File ff = new File(Params.ONTOLOGY_DIR);
    File[] files = ff.listFiles();
    if (files == null) {
        logger.error("no ontology to import at " + ff.getAbsolutePath());
        return;
    }
    for (File f : files) {
        if (f.getName().endsWith(".owl") || f.getName().endsWith(".rdf") || f.getName().endsWith(".n3")
                || f.getName().endsWith(".ttl") || f.getName().endsWith(".xml")) {
            logger.info("Loading ontology file: " + f.getAbsolutePath());
            ontologyManager.doImport(f, "UTF-8");
        }
    }
    ontologyManager.updateCache();

    String outputPath = Params.OUTPUT_DIR;
    String graphPath = Params.GRAPHS_DIR;
    FileUtils.cleanDirectory(new File(graphPath));

    List<SemanticModel> semanticModels =
            ModelReader.importSemanticModelsFromJsonFiles(Params.MODEL_DIR, Params.MODEL_MAIN_FILE_EXT);

    ModelLearner_LOD modelLearner = null;

    boolean onlyGenerateSemanticTypeStatistics = false;
    boolean onlyUseOntology = false;
    boolean useCorrectType = false;
    int numberOfCandidates = 4;
    boolean onlyEvaluateInternalLinks = false;
    int maxPatternSize = 3;

    if (onlyGenerateSemanticTypeStatistics) {
        getStatistics(semanticModels);
        return;
    }

    String filePath = Params.RESULTS_DIR + "temp/";
    String filename = "";
    filename += "lod-results";
    filename += useCorrectType ? "-correct" : "-k=" + numberOfCandidates;
    filename += onlyUseOntology ? "-ontology" : "-p" + maxPatternSize;
    filename += onlyEvaluateInternalLinks ? "-internal" : "-all";
    filename += ".csv";

    PrintWriter resultFile = new PrintWriter(new File(filePath + filename));
    resultFile.println("source \t p \t r \t t \n");

    for (int i = 0; i < semanticModels.size(); i++) {
        // for (int i = 0; i <= 10; i++) {
        // int i = 1; {

        int newSourceIndex = i;
        SemanticModel newSource = semanticModels.get(newSourceIndex);

        logger.info("======================================================");
        logger.info(newSource.getName() + "(#attributes:" + newSource.getColumnNodes().size() + ")");
        System.out.println(newSource.getName() + "(#attributes:" + newSource.getColumnNodes().size() + ")");
        logger.info("======================================================");

        SemanticModel correctModel = newSource;
        List<ColumnNode> columnNodes = correctModel.getColumnNodes();
        List<Node> steinerNodes = new LinkedList<Node>(columnNodes);

        String graphName = graphPath + "lod" + Params.GRAPH_FILE_EXT;

        if (onlyUseOntology) {
            modelLearner = new ModelLearner_LOD(new GraphBuilder(ontologyManager, false), steinerNodes);
        } else if (new File(graphName).exists()) {
            // read graph from file
            try {
                logger.info("loading the graph ...");
                DirectedWeightedMultigraph<Node, DefaultLink> graph = GraphUtil.importJson(graphName);
                modelLearner = new ModelLearner_LOD(new GraphBuilderTopK(ontologyManager, graph), steinerNodes);
            } catch (Exception e) {
                e.printStackTrace();
                resultFile.close();
                return;
            }
        } else {
            logger.info("building the graph ...");
            // create and save the graph to file
            // GraphBuilder_Popularity b = new GraphBuilder_Popularity(ontologyManager,
            //         Params.LOD_OBJECT_PROPERIES_FILE,
            //         Params.LOD_DATA_PROPERIES_FILE);
            GraphBuilder_LOD_Pattern b =
                    new GraphBuilder_LOD_Pattern(ontologyManager, Params.PATTERNS_DIR, maxPatternSize);
            modelLearner = new ModelLearner_LOD(b.getGraphBuilder(), steinerNodes);
        }

        long start = System.currentTimeMillis();

        List<SortableSemanticModel> hypothesisList = modelLearner.hypothesize(useCorrectType, numberOfCandidates);

        long elapsedTimeMillis = System.currentTimeMillis() - start;
        float elapsedTimeSec = elapsedTimeMillis / 1000F;

        List<SortableSemanticModel> topHypotheses = null;
        if (hypothesisList != null) {
            // for (SortableSemanticModel sss : hypothesisList) {
            //     ModelEvaluation mmm = sss.evaluate(correctModel);
            //     System.out.println(mmm.getPrecision() + ", " + mmm.getRecall());
            // }
            topHypotheses = hypothesisList.size() > 10 ? hypothesisList.subList(0, 10) : hypothesisList;
        }

        Map<String, SemanticModel> models = new TreeMap<String, SemanticModel>();
        ModelEvaluation me;

        models.put("1-correct model", correctModel);

        if (topHypotheses != null)
            for (int k = 0; k < topHypotheses.size(); k++) {
                SortableSemanticModel m = topHypotheses.get(k);

                me = m.evaluate(correctModel, onlyEvaluateInternalLinks, false);

                String label = "candidate " + k + "\n" +
                        // (m.getSteinerNodes() == null ? "" : m.getSteinerNodes().getScoreDetailsString()) +
                        "link coherence:"
                        + (m.getLinkCoherence() == null ? "" : m.getLinkCoherence().getCoherenceValue()) + "\n";
                label += (m.getSteinerNodes() == null || m.getSteinerNodes().getCoherence() == null) ? ""
                        : "node coherence:" + m.getSteinerNodes().getCoherence().getCoherenceValue() + "\n";
                label += "confidence:" + m.getConfidenceScore() + "\n";
                label += m.getSteinerNodes() == null ? ""
                        : "mapping score:" + m.getSteinerNodes().getScore() + "\n";
                label += "cost:" + roundDecimals(m.getCost(), 6) + "\n" +
                        // "-distance:" + me.getDistance() +
                        "-precision:" + me.getPrecision() + "-recall:" + me.getRecall();

                models.put(label, m);

                if (k == 0) { // first rank model
                    System.out.println("precision: " + me.getPrecision() + ", recall: " + me.getRecall()
                            + ", time: " + elapsedTimeSec);
                    logger.info("precision: " + me.getPrecision() + ", recall: " + me.getRecall()
                            + ", time: " + elapsedTimeSec);
                    String s = newSource.getName() + "\t" + me.getPrecision() + "\t" + me.getRecall()
                            + "\t" + elapsedTimeSec;
                    resultFile.println(s);
                }
            }

        String outName = outputPath + newSource.getName() + Params.GRAPHVIS_OUT_DETAILS_FILE_EXT;

        GraphVizUtil.exportSemanticModelsToGraphviz(models, newSource.getName(), outName,
                GraphVizLabelType.LocalId, GraphVizLabelType.LocalUri, true, true);
    }

    resultFile.close();
}
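The getName() calls above follow a common pattern: filter a directory listing by extension before importing each file. A standalone sketch of just that pattern, assuming a hypothetical directory and extension list rather than Karma's Params.ONTOLOGY_DIR:

import java.io.File;
import java.util.Arrays;
import java.util.List;

public class OntologyFileFilter {
    public static void main(String[] args) {
        File dir = new File("/tmp/ontologies"); // hypothetical directory
        List<String> extensions = Arrays.asList(".owl", ".rdf", ".n3", ".ttl", ".xml");

        File[] files = dir.listFiles();
        if (files == null) { // listFiles() returns null when the path is not a readable directory
            System.err.println("no ontology to import at " + dir.getAbsolutePath());
            return;
        }
        for (File f : files) {
            // getName() strips the directory part, so endsWith() only ever sees the file name
            String name = f.getName().toLowerCase();
            if (extensions.stream().anyMatch(name::endsWith)) {
                System.out.println("would import: " + f.getAbsolutePath());
            }
        }
    }
}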
From source file:edu.uthscsa.ric.papaya.builder.Builder.java
public static void main(final String[] args) {
    final Builder builder = new Builder();

    // process command line
    final CommandLine cli = builder.createCLI(args);
    builder.setUseSample(cli.hasOption(ARG_SAMPLE));
    builder.setUseAtlas(cli.hasOption(ARG_ATLAS));
    builder.setLocal(cli.hasOption(ARG_LOCAL));
    builder.setPrintHelp(cli.hasOption(ARG_HELP));
    builder.setUseImages(cli.hasOption(ARG_IMAGE));
    builder.setSingleFile(cli.hasOption(ARG_SINGLE));
    builder.setUseParamFile(cli.hasOption(ARG_PARAM_FILE));
    builder.setUseTitle(cli.hasOption(ARG_TITLE));

    // print help, if necessary
    if (builder.isPrintHelp()) {
        builder.printHelp();
        return;
    }

    // find project root directory
    if (cli.hasOption(ARG_ROOT)) {
        try {
            builder.projectDir = (new File(cli.getOptionValue(ARG_ROOT))).getCanonicalFile();
        } catch (final IOException ex) {
            System.err.println("Problem finding root directory. Reason: " + ex.getMessage());
        }
    }

    if (builder.projectDir == null) {
        builder.projectDir = new File(System.getProperty("user.dir"));
    }

    // clean output dir
    final File outputDir = new File(builder.projectDir + "/" + OUTPUT_DIR);
    System.out.println("Cleaning output directory...");
    try {
        builder.cleanOutputDir(outputDir);
    } catch (final IOException ex) {
        System.err.println("Problem cleaning build directory. Reason: " + ex.getMessage());
    }

    if (builder.isLocal()) {
        System.out.println("Building for local usage...");
    }

    // write JS
    final File compressedFileJs = new File(outputDir, OUTPUT_JS_FILENAME);

    // build properties
    try {
        final File buildFile = new File(builder.projectDir + "/" + BUILD_PROP_FILE);
        builder.readBuildProperties(buildFile);
        builder.buildNumber++; // increment build number
        builder.writeBuildProperties(compressedFileJs, true);
        builder.writeBuildProperties(buildFile, false);
    } catch (final IOException ex) {
        System.err.println("Problem handling build properties. Reason: " + ex.getMessage());
    }

    String htmlParameters = null;
    if (builder.isUseParamFile()) {
        final String paramFileArg = cli.getOptionValue(ARG_PARAM_FILE);
        if (paramFileArg != null) {
            try {
                System.out.println("Including parameters...");
                final String parameters = FileUtils.readFileToString(new File(paramFileArg), "UTF-8");
                htmlParameters = "var params = " + parameters + ";";
            } catch (final IOException ex) {
                System.err.println("Problem reading parameters file!\n" + ex.getMessage());
            }
        }
    }

    String title = null;
    if (builder.isUseTitle()) {
        String str = cli.getOptionValue(ARG_TITLE);
        if (str != null) {
            str = str.trim();
            str = str.replace("\"", "");
            str = str.replace("'", "");
            if (str.length() > 0) {
                title = str;
                System.out.println("Using title: " + title);
            }
        }
    }

    try {
        final JSONArray loadableImages = new JSONArray();

        // sample image
        if (builder.isUseSample()) {
            System.out.println("Including sample image...");
            final File sampleFile = new File(builder.projectDir + "/" + SAMPLE_IMAGE_NII_FILE);
            final String filename = Utilities
                    .replaceNonAlphanumericCharacters(Utilities.removeNiftiExtensions(sampleFile.getName()));

            if (builder.isLocal()) {
                loadableImages.put(new JSONObject("{\"nicename\":\"Sample Image\",\"name\":\"" + filename
                        + "\",\"encode\":\"" + filename + "\"}"));
                final String sampleEncoded = Utilities.encodeImageFile(sampleFile);
                FileUtils.writeStringToFile(compressedFileJs,
                        "var " + filename + "= \"" + sampleEncoded + "\";\n", "UTF-8", true);
            } else {
                loadableImages.put(new JSONObject("{\"nicename\":\"Sample Image\",\"name\":\"" + filename
                        + "\",\"url\":\"" + SAMPLE_IMAGE_NII_FILE + "\"}"));
                FileUtils.copyFile(sampleFile, new File(outputDir + "/" + SAMPLE_IMAGE_NII_FILE));
            }
        }

        // atlas
        if (builder.isUseAtlas()) {
            Atlas atlas = null;
            try {
                String atlasArg = cli.getOptionValue(ARG_ATLAS);
                if (atlasArg == null) {
                    atlasArg = (builder.projectDir + "/" + SAMPLE_DEFAULT_ATLAS_FILE);
                }
                final File atlasXmlFile = new File(atlasArg);
                System.out.println("Including atlas " + atlasXmlFile);
                atlas = new Atlas(atlasXmlFile);
                final File atlasJavaScriptFile = atlas.createAtlas(builder.isLocal());
                System.out.println("Using atlas image file " + atlas.getImageFile());

                if (builder.isLocal()) {
                    loadableImages.put(
                            new JSONObject("{\"nicename\":\"Atlas\",\"name\":\"" + atlas.getImageFileNewName()
                                    + "\",\"encode\":\"" + atlas.getImageFileNewName() + "\",\"hide\":true}"));
                } else {
                    final File atlasImageFile = atlas.getImageFile();
                    final String atlasPath = "data/" + atlasImageFile.getName();
                    loadableImages.put(new JSONObject("{\"nicename\":\"Atlas\",\"name\":\""
                            + atlas.getImageFileNewName() + "\",\"url\":\"" + atlasPath + "\",\"hide\":true}"));
                    FileUtils.copyFile(atlasImageFile, new File(outputDir + "/" + atlasPath));
                }

                builder.writeFile(atlasJavaScriptFile, compressedFileJs);
            } catch (final IOException ex) {
                System.err.println("Problem finding atlas file.\nReason: " + ex.getMessage());
            }
        }

        // additional images
        if (builder.isUseImages()) {
            final String[] imageArgs = cli.getOptionValues(ARG_IMAGE);
            if (imageArgs != null) {
                for (final String imageArg : imageArgs) {
                    final File file = new File(imageArg);
                    System.out.println("Including image " + file);
                    final String filename = Utilities
                            .replaceNonAlphanumericCharacters(Utilities.removeNiftiExtensions(file.getName()));

                    if (builder.isLocal()) {
                        loadableImages.put(new JSONObject(
                                "{\"nicename\":\"" + Utilities.removeNiftiExtensions(file.getName())
                                        + "\",\"name\":\"" + filename + "\",\"encode\":\"" + filename + "\"}"));
                        final String sampleEncoded = Utilities.encodeImageFile(file);
                        FileUtils.writeStringToFile(compressedFileJs,
                                "var " + filename + "= \"" + sampleEncoded + "\";\n", "UTF-8", true);
                    } else {
                        final String filePath = "data/" + file.getName();
                        loadableImages.put(new JSONObject(
                                "{\"nicename\":\"" + Utilities.removeNiftiExtensions(file.getName())
                                        + "\",\"name\":\"" + filename + "\",\"url\":\"" + filePath + "\"}"));
                        FileUtils.copyFile(file, new File(outputDir + "/" + filePath));
                    }
                }
            }
        }

        File tempFileJs = null;
        try {
            tempFileJs = builder.createTempFile();
        } catch (final IOException ex) {
            System.err.println("Problem creating temp write file. Reason: " + ex.getMessage());
        }

        // write image refs
        FileUtils.writeStringToFile(tempFileJs,
                "var " + PAPAYA_LOADABLE_IMAGES + " = " + loadableImages.toString() + ";\n", "UTF-8", true);

        // compress JS
        tempFileJs = builder.concatenateFiles(JS_FILES, "js", tempFileJs);
        System.out.println("Compressing JavaScript... ");
        FileUtils.writeStringToFile(compressedFileJs, "\n", "UTF-8", true);
        builder.compressJavaScript(tempFileJs, compressedFileJs, new YuiCompressorOptions());
        //tempFileJs.deleteOnExit();
    } catch (final IOException ex) {
        System.err.println("Problem concatenating JavaScript. Reason: " + ex.getMessage());
    }

    // compress CSS
    final File compressedFileCss = new File(outputDir, OUTPUT_CSS_FILENAME);
    try {
        final File concatFile = builder.concatenateFiles(CSS_FILES, "css", null);
        System.out.println("Compressing CSS... ");
        builder.compressCSS(concatFile, compressedFileCss, new YuiCompressorOptions());
        concatFile.deleteOnExit();
    } catch (final IOException ex) {
        System.err.println("Problem concatenating CSS. Reason: " + ex.getMessage());
    }

    // write HTML
    try {
        System.out.println("Writing HTML... ");
        if (builder.singleFile) {
            builder.writeHtml(outputDir, compressedFileJs, compressedFileCss, htmlParameters, title);
        } else {
            builder.writeHtml(outputDir, htmlParameters, title);
        }
    } catch (final IOException ex) {
        System.err.println("Problem writing HTML. Reason: " + ex.getMessage());
    }

    System.out.println("Done! Output files located at " + outputDir);
}
From source file:org.silverpeas.dbbuilder.DBBuilder.java
/**
 * @param args
 * @see
 */
public static void main(String[] args) {
    ClassPathXmlApplicationContext springContext = new ClassPathXmlApplicationContext(
            "classpath:/spring-jdbc-datasource.xml");
    try {
        // Open the trace logs
        Date startDate = new Date();
        System.out.println(
                MessageFormat.format(messages.getString("dbbuilder.start"), DBBuilderAppVersion, startDate));
        console = new Console(DBBuilder.class);
        console.printMessage("*************************************************************");
        console.printMessage(
                MessageFormat.format(messages.getString("dbbuilder.start"), DBBuilderAppVersion, startDate));

        // Read the environment variables from dbBuilderSettings
        dbBuilderResources = FileUtil
                .loadResource("/org/silverpeas/dbBuilder/settings/dbBuilderSettings.properties");

        // Read the input parameters
        params = new CommandLineParameters(console, args);

        if (params.isSimulate() && DatabaseType.ORACLE == params.getDbType()) {
            throw new Exception(messages.getString("oracle.simulate.error"));
        }
        console.printMessage(messages.getString("jdbc.connection.configuration"));
        console.printMessage(ConnectionFactory.getConnectionInfo());
        console.printMessage("\tAction : " + params.getAction());
        console.printMessage("\tVerbose mode : " + params.isVerbose());
        console.printMessage("\tSimulate mode : " + params.isSimulate());

        if (Action.ACTION_CONNECT == params.getAction()) {
            // just a short message, and that's it
            console.printMessage(messages.getString("connection.success"));
            System.out.println(messages.getString("connection.success"));
        } else {
            // Modules present in the DB before the install
            console.printMessage("DB Status before build :");
            List<String> packagesIntoDB = checkDBStatus();

            // initialize a list of SQL instructions to run at the end of the upgrade
            // to bring the module versions in the database up to date
            MetaInstructions sqlMetaInstructions = new MetaInstructions();

            File dirXml = new File(params.getDbType().getDBContributionDir());
            DBXmlDocument destXml = loadMasterContribution(dirXml);

            UninstallInformations processesToCacheIntoDB = new UninstallInformations();

            File[] listeFileXml = dirXml.listFiles();
            Arrays.sort(listeFileXml);

            List<DBXmlDocument> listeDBXmlDocument = new ArrayList<DBXmlDocument>(listeFileXml.length);
            int ignoredFiles = 0;

            // Open all the configuration files
            console.printMessage(messages.getString("ignored.contribution"));
            for (File xmlFile : listeFileXml) {
                if (xmlFile.isFile() && "xml".equals(FileUtil.getExtension(xmlFile))
                        && !(FIRST_DBCONTRIBUTION_FILE.equalsIgnoreCase(xmlFile.getName()))
                        && !(MASTER_DBCONTRIBUTION_FILE.equalsIgnoreCase(xmlFile.getName()))) {
                    DBXmlDocument fXml = new DBXmlDocument(dirXml, xmlFile.getName());
                    fXml.load();
                    // check the dependencies and keep the file only if they are all resolved
                    if (hasUnresolvedRequirements(listeFileXml, fXml)) {
                        console.printMessage(
                                '\t' + xmlFile.getName() + " (because of unresolved requirements).");
                        ignoredFiles++;
                    } else if (ACTION_ENFORCE_UNINSTALL == params.getAction()) {
                        console.printMessage('\t' + xmlFile.getName() + " (because of "
                                + ACTION_ENFORCE_UNINSTALL + " mode).");
                        ignoredFiles++;
                    } else {
                        listeDBXmlDocument.add(fXml);
                    }
                }
            }
            if (0 == ignoredFiles) {
                console.printMessage("\t(none)");
            }

            // prepare a HashMap of the modules present in the contribution files
            Map packagesIntoFile = new HashMap();
            int j = 0;
            console.printMessage(messages.getString("merged.contribution"));
            console.printMessage(params.getAction().toString());
            if (ACTION_ENFORCE_UNINSTALL != params.getAction()) {
                console.printMessage('\t' + FIRST_DBCONTRIBUTION_FILE);
                j++;
            }
            for (DBXmlDocument currentDoc : listeDBXmlDocument) {
                console.printMessage('\t' + currentDoc.getName());
                j++;
            }
            if (0 == j) {
                console.printMessage("\t(none)");
            }

            // merge the various eligible contribution files:
            console.printMessage("Build decisions are :");
            // first the dbbuilder-contribution file ...
            DBXmlDocument fileXml;
            if (ACTION_ENFORCE_UNINSTALL != params.getAction()) {
                try {
                    fileXml = new DBXmlDocument(dirXml, FIRST_DBCONTRIBUTION_FILE);
                    fileXml.load();
                } catch (Exception e) {
                    // dbbuilder contribution not found -> keep going, we are most likely
                    // uninstalling everything
                    fileXml = null;
                }
                if (null != fileXml) {
                    DBBuilderFileItem dbbuilderItem = new DBBuilderFileItem(fileXml);
                    packagesIntoFile.put(dbbuilderItem.getModule(), null);
                    mergeActionsToDo(dbbuilderItem, destXml, processesToCacheIntoDB, sqlMetaInstructions);
                }
            }

            // ... then the others
            for (DBXmlDocument currentDoc : listeDBXmlDocument) {
                DBBuilderFileItem tmpdbbuilderItem = new DBBuilderFileItem(currentDoc);
                packagesIntoFile.put(tmpdbbuilderItem.getModule(), null);
                mergeActionsToDo(tmpdbbuilderItem, destXml, processesToCacheIntoDB, sqlMetaInstructions);
            }

            // ... and finally the DB pieces to uninstall
            // ... note: since no ordering information is available, they are processed in
            // reverse order so that busCore goes last, because many constraints of the
            // other modules reference the PKs of busCore
            List<String> itemsList = new ArrayList<String>();

            boolean foundDBBuilder = false;
            for (String dbPackage : packagesIntoDB) {
                if (!packagesIntoFile.containsKey(dbPackage)) {
                    // Package in the DB but not in a contribution file -> candidate for uninstall
                    if (DBBUILDER_MODULE.equalsIgnoreCase(dbPackage)) {
                        foundDBBuilder = true;
                    } else if (ACTION_ENFORCE_UNINSTALL == params.getAction()) {
                        if (dbPackage.equals(params.getModuleName())) {
                            itemsList.add(0, dbPackage);
                        }
                    } else {
                        itemsList.add(0, dbPackage);
                    }
                }
            }
            if (foundDBBuilder) {
                if (ACTION_ENFORCE_UNINSTALL == params.getAction()) {
                    if (DBBUILDER_MODULE.equals(params.getModuleName())) {
                        itemsList.add(itemsList.size(), DBBUILDER_MODULE);
                    }
                } else {
                    itemsList.add(itemsList.size(), DBBUILDER_MODULE);
                }
            }
            for (String item : itemsList) {
                console.printMessage("**** Treating " + item + " ****");
                DBBuilderDBItem tmpdbbuilderItem = new DBBuilderDBItem(item);
                mergeActionsToDo(tmpdbbuilderItem, destXml, processesToCacheIntoDB, sqlMetaInstructions);
            }
            destXml.setName("res.txt");
            destXml.save();
            console.printMessage("Build parts are :");

            // Process the selected pieces
            // note: during this phase errors are handled -> they are caught on
            // return without being reprocessed
            if (ACTION_INSTALL == params.getAction()) {
                processDB(destXml, processesToCacheIntoDB, sqlMetaInstructions, TAGS_TO_MERGE_4_INSTALL);
            } else if (ACTION_UNINSTALL == params.getAction()
                    || ACTION_ENFORCE_UNINSTALL == params.getAction()) {
                processDB(destXml, processesToCacheIntoDB, sqlMetaInstructions, TAGS_TO_MERGE_4_UNINSTALL);
            } else if (ACTION_OPTIMIZE == params.getAction()) {
                processDB(destXml, processesToCacheIntoDB, sqlMetaInstructions, TAGS_TO_MERGE_4_OPTIMIZE);
            } else if (ACTION_ALL == params.getAction()) {
                processDB(destXml, processesToCacheIntoDB, sqlMetaInstructions, TAGS_TO_MERGE_4_ALL);
            }

            // Modules present in the DB at the end
            console.printMessage("Finally DB Status :");
            checkDBStatus();
        }
        Date endDate = new Date();
        console.printMessage(MessageFormat.format(messages.getString("dbbuilder.success"), endDate));
        System.out.println("*******************************************************************");
        System.out.println(MessageFormat.format(messages.getString("dbbuilder.success"), endDate));
    } catch (Exception e) {
        e.printStackTrace();
        console.printError(e.getMessage(), e);
        Date endDate = new Date();
        console.printError(MessageFormat.format(messages.getString("dbbuilder.failure"), endDate));
        System.out.println("*******************************************************************");
        System.out.println(MessageFormat.format(messages.getString("dbbuilder.failure"), endDate));
        System.exit(1);
    } finally {
        springContext.close();
        console.close();
    }
}
From source file:net.sf.firemox.xml.XmlConfiguration.java
/**
 * <ul>
 * 2 modes:
 * <li>Update the MDB for the specified TBS against the XML files (main file,
 * cards and fragments). Arguments are : TBS_NAME</li>
 * <li>Rebuild completely the MDB for the specified TBS. Arguments are : -full
 * TBS_NAME</li>
 * </ul>
 *
 * @param args
 *          main arguments.
 */
public static void main(String... args) {
    options = new Options();
    final CmdLineParser parser = new CmdLineParser(options);
    try {
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        // Display help
        info(e.getMessage());
        parser.setUsageWidth(80);
        parser.printUsage(System.out);
        System.exit(-1);
        return;
    }

    if (options.isVersion()) {
        // Display version
        info("Version is " + IdConst.VERSION);
        System.exit(-1);
        return;
    }

    if (options.isHelp()) {
        // Display help
        parser.setUsageWidth(80);
        parser.printUsage(System.out);
        System.exit(-1);
        return;
    }

    warning = 0;
    uncompleted = 0;
    error = 0;
    long start = System.currentTimeMillis();
    XmlTools.initHashMaps();
    MToolKit.tbsName = options.getMdb();
    String xmlFile = MToolKit.getFile(IdConst.TBS_DIR + "/" + MToolKit.tbsName + ".xml", false)
            .getAbsolutePath();
    try {
        if (options.isForce()) {
            final File recycledDir = MToolKit.getTbsFile("recycled");
            if (!recycledDir.exists() || !recycledDir.isDirectory()) {
                recycledDir.mkdir();
            }
            parseRules(xmlFile, MToolKit.getTbsFile("recycled").getAbsolutePath(),
                    new FileOutputStream(MToolKit.getTbsFile(MToolKit.tbsName + ".mdb", false)));
        } else {
            // Check the up-to-date state of MDB
            final File file = MToolKit
                    .getFile(IdConst.TBS_DIR + "/" + MToolKit.tbsName + "/" + MToolKit.tbsName + ".mdb");
            final long lastModifiedMdb;
            if (file == null) {
                lastModifiedMdb = 0;
            } else {
                lastModifiedMdb = file.lastModified();
            }
            boolean update = false;

            // Check the up-to-date state of MDB against the main XML file
            if (MToolKit.getFile(xmlFile).lastModified() > lastModifiedMdb) {
                // The main XML file is newer than MDB
                System.out.println("MDB is out of date, " + xmlFile + " is newer");
                update = true;
            } else {
                final File fragmentDir = MToolKit.getTbsFile("");
                for (File frament : fragmentDir.listFiles(
                        (FilenameFilter) FileFilterUtils.andFileFilter(FileFilterUtils.suffixFileFilter("xml"),
                                FileFilterUtils.prefixFileFilter("fragment-")))) {
                    if (frament.lastModified() > lastModifiedMdb) {
                        // One card is newer than MDB
                        System.out.println(
                                "MDB is out of date, at least one fragment found : " + frament.getName());
                        update = true;
                        break;
                    }
                }

                if (!update) {
                    // Check the up-to-date state of MDB against the cards
                    final File recycledDir = MToolKit.getTbsFile("recycled");
                    if (!recycledDir.exists() || !recycledDir.isDirectory()) {
                        recycledDir.mkdir();
                    }
                    if (recycledDir.lastModified() > lastModifiedMdb) {
                        // The recycled XML file is newer than MDB
                        System.out.println("MDB is out of date, the recycled directory is new");
                        update = true;
                    } else {
                        for (File card : recycledDir.listFiles((FilenameFilter) FileFilterUtils.andFileFilter(
                                FileFilterUtils.suffixFileFilter("xml"), FileFilterUtils.notFileFilter(
                                        FileFilterUtils.suffixFileFilter(IdConst.FILE_DATABASE_SAVED))))) {
                            if (card.lastModified() > lastModifiedMdb) {
                                // One card is newer than MDB
                                System.out.println("MDB is out of date, at least one new card found : " + card);
                                update = true;
                                break;
                            }
                        }
                    }
                }
            }

            if (!update) {
                return;
            }

            // Need to update the whole MDB
            parseRules(xmlFile, MToolKit.getTbsFile("recycled").getAbsolutePath(),
                    new FileOutputStream(MToolKit.getTbsFile(MToolKit.tbsName + ".mdb", false)));
        }
    } catch (SAXParseException e) {
        // Ignore this error
    } catch (Exception e) {
        e.printStackTrace();
    }
    if (warning > 0) {
        System.out.println("\t" + warning + " warning" + (warning > 1 ? "s" : ""));
    }
    if (error > 0) {
        System.out.println("\t" + error + " error" + (error > 1 ? "s" : ""));
        System.out.println("Some cards have not been built correctly. Fix them.");
    } else {
        System.out.println("\tSuccessfull build");
    }
    System.out.println("\tTime : " + (System.currentTimeMillis() - start) / 1000 + " s");
}
From source file:edu.msu.cme.rdp.readseq.utils.QualityTrimmer.java
public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption("f", "fastq-out", false, "Write fastq instead of fasta file");
    options.addOption("l", "less-than", false, "Trim at <= instead of strictly =");
    options.addOption("i", "illumina", false, "Illumina trimming mode");

    FastqWriter fastqOut = null;
    FastaWriter fastaOut = null;

    byte qualTrim = -1;

    boolean writeFasta = true;
    boolean trimle = false;
    boolean illumina = false;

    List<SeqReader> readers = new ArrayList();
    List<File> seqFiles = new ArrayList();

    try {
        CommandLine line = new PosixParser().parse(options, args);

        if (line.hasOption("fastq-out")) {
            writeFasta = false;
        }
        if (line.hasOption("less-than")) {
            trimle = true;
        }
        if (line.hasOption("illumina")) {
            illumina = true;
        }

        args = line.getArgs();

        if (args.length < 2) {
            throw new Exception("Unexpected number of arguments");
        }

        if (args[0].length() != 1) {
            throw new Exception("Expected single character quality score");
        }

        qualTrim = FastqCore.Phred33QualFunction.translate(args[0].charAt(0));

        for (int index = 1; index < args.length; index++) {
            File seqFile = new File(args[index]);
            SeqReader reader;

            if (SeqUtils.guessFileFormat(seqFile) == SequenceFormat.FASTA) {
                if (index + 1 == args.length) {
                    throw new Exception("Fasta files must be immediately followed by their quality file");
                }
                File qualFile = new File(args[index + 1]);
                if (SeqUtils.guessFileFormat(qualFile) != SequenceFormat.FASTA) {
                    throw new Exception(seqFile + " was not followed by a fasta quality file");
                }
                reader = new QSeqReader(seqFile, qualFile);
                index++;
            } else {
                if (seqFile.getName().endsWith(".gz")) {
                    reader = new SequenceReader(new GZIPInputStream(new FileInputStream(seqFile)));
                } else {
                    reader = new SequenceReader(seqFile);
                }
            }

            readers.add(reader);
            seqFiles.add(seqFile);
        }
    } catch (Exception e) {
        new HelpFormatter().printHelp("USAGE: QualityTrimmer [options] <ascii_score> <seq_file> [qual_file]",
                options, true);
        System.err.println("Error: " + e.getMessage());
        System.exit(1);
    }

    for (int readerIndex = 0; readerIndex < readers.size(); readerIndex++) {
        File seqFile = seqFiles.get(readerIndex);
        String outStem = "trimmed_" + seqFile.getName().substring(0, seqFile.getName().lastIndexOf("."));

        if (writeFasta) {
            fastaOut = new FastaWriter(outStem + ".fasta");
        } else {
            fastqOut = new FastqWriter(outStem + ".fastq", FastqCore.Phred33QualFunction);
        }

        int[] lengthHisto = new int[200];

        SeqReader reader = readers.get(readerIndex);
        QSequence qseq;

        long totalLength = 0;
        int totalSeqs = 0;
        long trimmedLength = 0;
        int trimmedSeqs = 0;
        int zeroLengthAfterTrimming = 0;

        long startTime = System.currentTimeMillis();
        while ((qseq = (QSequence) reader.readNextSequence()) != null) {
            char[] bases = qseq.getSeqString().toCharArray();
            byte[] qual = qseq.getQuality();

            if (bases.length != qual.length) {
                System.err.println(qseq.getSeqName() + ": Quality length doesn't match seq length for seq");
                continue;
            }

            totalSeqs++;
            totalLength += bases.length;

            int trimIndex = -1;
            if (illumina && qual[bases.length - 1] == qualTrim) {
                trimIndex = bases.length - 1;
                while (trimIndex >= 0 && qual[trimIndex] == qualTrim) {
                    trimIndex--;
                }
                trimIndex++; // Technically we're positioned over the first good base, move back to the last bad base
            } else if (!illumina) {
                for (int index = 0; index < bases.length; index++) {
                    if (qual[index] == qualTrim || (trimle && qual[index] < qualTrim)) {
                        trimIndex = index;
                        break;
                    }
                }
            }

            String outSeq;
            byte[] outQual;
            if (trimIndex == -1) {
                outSeq = qseq.getSeqString();
                outQual = qseq.getQuality();
            } else {
                outSeq = new String(bases, 0, trimIndex);
                outQual = Arrays.copyOfRange(qual, 0, trimIndex);
                trimmedSeqs++;
            }

            int len = outSeq.length();
            trimmedLength += len;

            if (len >= lengthHisto.length) {
                lengthHisto = Arrays.copyOf(lengthHisto, len + 1);
            }
            lengthHisto[len]++;

            if (outSeq.length() == 0) {
                //System.err.println(qseq.getSeqName() + ": length 0 after trimming");
                zeroLengthAfterTrimming++;
                continue;
            }

            if (writeFasta) {
                fastaOut.writeSeq(qseq.getSeqName(), qseq.getDesc(), outSeq);
            } else {
                fastqOut.writeSeq(qseq.getSeqName(), qseq.getDesc(), outSeq, outQual);
            }
        }

        reader.close();
        if (writeFasta) {
            fastaOut.close();
        } else {
            fastqOut.close();
        }

        System.out.println(
                "Processed " + seqFile + " in " + (System.currentTimeMillis() - startTime) / 1000.0 + "s");
        System.out.println("Before trimming:");
        System.out.println("Total Sequences: " + totalSeqs);
        System.out.println("Total Sequence Data: " + totalLength);
        System.out.println("Average sequence length: " + ((float) totalLength / totalSeqs));
        System.out.println();
        System.out.println("After trimming:");
        System.out.println("Total Sequences: " + (totalSeqs - zeroLengthAfterTrimming));
        System.out.println("Sequences Trimmed: " + trimmedSeqs);
        System.out.println("Total Sequence Data: " + trimmedLength);
        System.out.println("Average sequence length: "
                + ((float) trimmedLength / (totalSeqs - zeroLengthAfterTrimming)));
        System.out.println();
        System.out.println("Length\tCount");
        for (int index = 0; index < lengthHisto.length; index++) {
            if (lengthHisto[index] == 0) {
                continue;
            }
            System.out.println(index + "\t" + lengthHisto[index]);
        }
        System.out.println();
        System.out.println();
        System.out.println();
    }
}
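QualityTrimmer uses getName() twice: once to detect a .gz suffix and once to build an output stem by cutting the name at its last dot. A minimal sketch of that second pattern, with hypothetical input paths, that also guards against names containing no dot (the example above assumes lastIndexOf(".") succeeds):

import java.io.File;

public class OutputNameDemo {
    // Strip the last extension from a file's name; return the name unchanged if it has no dot.
    static String baseName(File f) {
        String name = f.getName(); // name only, no directory part
        int dot = name.lastIndexOf('.');
        return dot > 0 ? name.substring(0, dot) : name;
    }

    public static void main(String[] args) {
        System.out.println("trimmed_" + baseName(new File("/data/reads/sample1.fastq"))); // trimmed_sample1
        System.out.println("trimmed_" + baseName(new File("reads.fastq.gz")));            // trimmed_reads.fastq
    }
}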
From source file:fr.ujm.tse.lt2c.satin.main.Main.java
public static void main(final String[] args) {
    final ReasoningArguments arguments = getArguments(args);
    if (arguments == null) {
        return;
    }
    if (arguments.getFiles().isEmpty()) {
        LOGGER.warn("No available file.");
        return;
    }
    if (arguments.isWarmupMode()) {
        LOGGER.info("---Warm-up lap---");
        for (final File file : arguments.getFiles()) {
            reason(arguments, file, arguments.isBatchMode());
        }
        LOGGER.info("---Real runs---");
    } else {
        LOGGER.info("---Starting inference---");
    }
    if (arguments.isVerboseMode()) {
        LOGGER.info("File Time Infered Profile Buffer Timeout");
    }
    for (final File file : arguments.getFiles()) {
        for (int i = 0; i < arguments.getIteration(); i++) {
            final RunEntity run = reason(arguments, file, arguments.isBatchMode());
            if (arguments.isVerboseMode()) {
                LOGGER.info(file.getName() + " " + run.getInferenceTime() / 1000000.0 + " "
                        + run.getNbInferedTriples() + " " + run.getProfile() + " " + run.getBufferSize() + " "
                        + run.getTimeout());
            }
        }
    }
    LOGGER.info("---Done---");
}
From source file:com.adguard.compiler.Main.java
/**
 * Script for building extension
 *
 * @param args Arguments
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
    disableSslValidation();

    String sourcePath = getParamValue(args, "--source", "../../Extension");
    String destPath = getParamValue(args, "--dest", "../../Build");

    //final build name
    String buildName = getParamValue(args, "--name", null);

    //version
    String version = getParamValue(args, "--version", null);

    //build branch
    String branch = getParamValue(args, "--branch", null);

    //browser
    String configBrowser = getParamValue(args, "--browser", null);
    Browser browser = Browser.getByName(configBrowser);

    //download filters before build
    boolean updateFilters = Boolean.valueOf(getParamValue(args, "--update-filters", "false"));

    //use local filters
    boolean useLocalScriptRules = Boolean.valueOf(getParamValue(args, "--local-script-rules", "false"));

    //update url for extension
    String updateUrl = getParamValue(args, "--update-url", null);

    //safari extension id
    String extensionId = getParamValue(args, "--extensionId", null);

    //pack method
    String packMethod = getParamValue(args, "--pack", null);

    if (!validateParameters(sourcePath, buildName, version, extensionId, configBrowser, packMethod)) {
        System.exit(-1);
    }

    File source = new File(sourcePath);

    buildName = getBuildName(buildName, browser, version);
    File dest = new File(destPath, buildName);

    if (updateFilters) {
        FilterUtils.updateGroupsAndFiltersMetadata(source);
        FilterUtils.updateLocalFilters(source);
    }

    Map<Integer, List<String>> filtersScriptRules = FilterUtils.getScriptRules(source);

    File buildResult = createBuild(source, dest, useLocalScriptRules, filtersScriptRules, extensionId,
            updateUrl, browser, version, branch);

    if (browser == Browser.SAFARI && updateFilters) {
        FilterUtils.loadEnglishFilterForSafari(new File(buildResult, "filters"));
    }

    File packedFile = null;
    if (packMethod != null) {
        if (PACK_METHOD_ZIP.equals(packMethod)) {
            packedFile = PackageUtils.createZip(ZIP_MAKE_PATH, buildResult);
            FileUtils.deleteQuietly(buildResult);
        } else if (PACK_METHOD_CRX.equals(packMethod)) {
            packedFile = PackageUtils.createCrx(CRX_MAKE_PATH, buildResult, CHROME_CERT_FILE);
            FileUtils.deleteQuietly(buildResult);
        } else if (PACK_METHOD_XPI.equals(packMethod)) {
            packedFile = PackageUtils.createXpi(XPI_MAKE_PATH, buildResult, "adguard-adblocker");
            FileUtils.deleteQuietly(buildResult);
        }
    }

    log.info("Build created. Version: " + version);
    if (packedFile != null) {
        log.info("File: " + packedFile.getName());
    } else {
        log.info("File: " + buildResult.getName());
    }
    if (extensionId != null) {
        log.info("ExtensionId: " + extensionId);
    }
    log.info("Browser: " + browser);
    if (updateUrl != null) {
        log.info("UpdateUrl: " + updateUrl);
    }
    log.info("LocalScriptRules: " + useLocalScriptRules);
    log.info("---------------------------------");
}
From source file:com.github.rwhogg.git_vcr.App.java
/**
 * main is the entry point for Git-VCR
 * @param args Command-line arguments
 */
public static void main(String[] args) {
    Options options = parseCommandLine(args);
    HierarchicalINIConfiguration configuration = null;
    try {
        configuration = getConfiguration();
    } catch (ConfigurationException e) {
        Util.error("could not parse configuration file!");
    }

    // verify we are in a git folder and then construct the repo
    final File currentFolder = new File(".");
    FileRepositoryBuilder builder = new FileRepositoryBuilder();
    Repository localRepo = null;
    try {
        localRepo = builder.findGitDir().build();
    } catch (IOException e) {
        Util.error("not in a Git folder!");
    }

    // deal with submodules
    assert localRepo != null;
    if (localRepo.isBare()) {
        FileRepositoryBuilder parentBuilder = new FileRepositoryBuilder();
        Repository parentRepo;
        try {
            parentRepo = parentBuilder.setGitDir(new File("..")).findGitDir().build();
            localRepo = SubmoduleWalk.getSubmoduleRepository(parentRepo, currentFolder.getName());
        } catch (IOException e) {
            Util.error("could not find parent of submodule!");
        }
    }

    // if we need to retrieve the patch file, get it now
    URL patchUrl = options.getPatchUrl();
    String patchPath = patchUrl.getFile();
    File patchFile = null;
    HttpUrl httpUrl = HttpUrl.get(patchUrl);
    if (httpUrl != null) {
        try {
            patchFile = com.twitter.common.io.FileUtils.SYSTEM_TMP.createFile(".diff");
            Request request = new Request.Builder().url(httpUrl).build();
            OkHttpClient client = new OkHttpClient();
            Call call = client.newCall(request);
            Response response = call.execute();
            ResponseBody body = response.body();
            if (!response.isSuccessful()) {
                Util.error("could not retrieve diff file from URL " + patchUrl);
            }
            String content = body.string();
            org.apache.commons.io.FileUtils.write(patchFile, content, (Charset) null);
        } catch (IOException ie) {
            Util.error("could not retrieve diff file from URL " + patchUrl);
        }
    } else {
        patchFile = new File(patchPath);
    }

    // find the patch
    //noinspection ConstantConditions
    if (!patchFile.canRead()) {
        Util.error("patch file " + patchFile.getAbsolutePath() + " is not readable!");
    }

    final Git git = new Git(localRepo);

    // handle the branch
    String branchName = options.getBranchName();
    String theOldCommit = null;
    try {
        theOldCommit = localRepo.getBranch();
    } catch (IOException e2) {
        Util.error("could not get reference to current branch!");
    }
    final String oldCommit = theOldCommit; // needed to reference from shutdown hook
    if (branchName != null) {
        // switch to the branch
        try {
            git.checkout().setName(branchName).call();
        } catch (RefAlreadyExistsException e) {
            // FIXME Auto-generated catch block
            e.printStackTrace();
        } catch (RefNotFoundException e) {
            Util.error("the branch " + branchName + " was not found!");
        } catch (InvalidRefNameException e) {
            Util.error("the branch name " + branchName + " is invalid!");
        } catch (org.eclipse.jgit.api.errors.CheckoutConflictException e) {
            Util.error("there was a checkout conflict!");
        } catch (GitAPIException e) {
            Util.error("there was an unspecified Git API failure!");
        }
    }

    // ensure there are no changes before we apply the patch
    try {
        if (!git.status().call().isClean()) {
            Util.error("cannot run git-vcr while there are uncommitted changes!");
        }
    } catch (NoWorkTreeException e1) {
        // won't happen
        assert false;
    } catch (GitAPIException e1) {
        Util.error("call to git status failed!");
    }

    // list all the files changed
    String patchName = patchFile.getName();
    Patch patch = new Patch();
    try {
        patch.parse(new FileInputStream(patchFile));
    } catch (FileNotFoundException e) {
        assert false;
    } catch (IOException e) {
        Util.error("could not parse the patch file!");
    }

    ReviewResults oldResults = new ReviewResults(patchName, patch, configuration, false);
    try {
        oldResults.review();
    } catch (InstantiationException e1) {
        Util.error("could not instantiate a review tool class!");
    } catch (IllegalAccessException e1) {
        Util.error("illegal access to a class");
    } catch (ClassNotFoundException e1) {
        Util.error("could not find a review tool class");
    } catch (ReviewFailedException e1) {
        e1.printStackTrace();
        Util.error("Review failed!");
    }

    // we're about to change the repo, so register a shutdown hook to clean it up
    Runtime.getRuntime().addShutdownHook(new Thread() {
        public void run() {
            cleanupGit(git, oldCommit);
        }
    });

    // apply the patch
    try {
        git.apply().setPatch(new FileInputStream(patchFile)).call();
    } catch (PatchFormatException e) {
        Util.error("patch file " + patchFile.getAbsolutePath() + " is malformatted!");
    } catch (PatchApplyException e) {
        Util.error("patch file " + patchFile.getAbsolutePath() + " did not apply correctly!");
    } catch (FileNotFoundException e) {
        assert false;
    } catch (GitAPIException e) {
        Util.error(e.getLocalizedMessage());
    }

    ReviewResults newResults = new ReviewResults(patchName, patch, configuration, true);
    try {
        newResults.review();
    } catch (InstantiationException e1) {
        Util.error("could not instantiate a review tool class!");
    } catch (IllegalAccessException e1) {
        Util.error("illegal access to a class");
    } catch (ClassNotFoundException e1) {
        Util.error("could not find a review tool class");
    } catch (ReviewFailedException e1) {
        e1.printStackTrace();
        Util.error("Review failed!");
    }

    // generate and show the report
    VelocityReport report = new VelocityReport(patch, oldResults, newResults);
    File reportFile = null;
    try {
        reportFile = com.twitter.common.io.FileUtils.SYSTEM_TMP.createFile(".html");
        org.apache.commons.io.FileUtils.write(reportFile, report.toString(), (String) null);
    } catch (IOException e) {
        Util.error("could not generate the results page!");
    }
    try {
        assert reportFile != null;
        Desktop.getDesktop().open(reportFile);
    } catch (IOException e) {
        Util.error("could not open the results page!");
    }
}
From source file:org.yardstickframework.report.jfreechart.JFreeChartGraphPlotter.java
/**
 * @param cmdArgs Arguments.
 */
public static void main(String[] cmdArgs) {
    try {
        JFreeChartGraphPlotterArguments args = new JFreeChartGraphPlotterArguments();

        JCommander jCommander = jcommander(cmdArgs, args, "<graph-plotter>");

        if (args.help()) {
            jCommander.usage();
            return;
        }

        if (args.inputFolders().isEmpty()) {
            errorHelp("Input folders are not defined.");
            return;
        }

        List<String> inFoldersAsString = args.inputFolders();
        List<File> inFolders = new ArrayList<>(inFoldersAsString.size());

        for (String folderAsString : inFoldersAsString)
            inFolders.add(new File(folderAsString).getAbsoluteFile());

        for (File inFolder : inFolders) {
            if (!inFolder.exists()) {
                errorHelp("Folder does not exist: " + inFolder.getAbsolutePath());
                return;
            }
        }

        List<List<List<File>>> benchFolders = new ArrayList<>();

        for (File inFolder : inFolders) {
            File[] dirs0 = inFolder.listFiles();

            if (dirs0 == null || dirs0.length == 0)
                continue;

            List<File> dirs = new ArrayList<>(Arrays.asList(dirs0));

            Collections.sort(dirs, FILE_NAME_COMP);

            boolean multipleDrivers = false;

            for (File f : dirs) {
                if (f.isFile() && MULTIPLE_DRIVERS_MARKER_FILE.equals(f.getName())) {
                    multipleDrivers = true;
                    break;
                }
            }

            List<List<File>> mulDrvFiles = new ArrayList<>();

            if (multipleDrivers) {
                for (File f : dirs) {
                    List<File> files = getFiles(f);

                    if (files != null)
                        mulDrvFiles.add(files);
                }
            } else {
                List<File> files = getFiles(inFolder);

                if (files != null)
                    mulDrvFiles.add(files);
            }

            benchFolders.add(mergeMultipleDriverLists(mulDrvFiles));
        }

        if (benchFolders.isEmpty()) {
            errorHelp("Input folders are empty or have invalid structure: " + inFoldersAsString);
            return;
        }

        String outputFolder = outputFolder(inFolders);

        JFreeChartGenerationMode mode = args.generationMode();

        if (mode == COMPOUND)
            processCompoundMode(outputFolder, benchFolders, args);
        else if (mode == COMPARISON)
            processComparisonMode(outputFolder, benchFolders, args);
        else if (mode == STANDARD)
            processStandardMode(benchFolders, args);
        else
            errorHelp("Unknown generation mode: " + args.generationMode());
    } catch (ParameterException e) {
        errorHelp("Invalid parameter.", e);
    } catch (Exception e) {
        errorHelp("Failed to execute graph generator.", e);
    }
}
From source file:org.ala.spatial.web.services.DownloadController.java
public static void main(String[] args) {
    // maxent - 1323641423144
    // aloc - 1323844048457
    String pid = "1323844048457";

    try {
        File dir = sfindFile(pid);
        if (dir != null) {
            //System.out.println("Found session data: " + dir.getAbsolutePath());
            //return "Found session data: " + dir.getAbsolutePath();

            String parentName = "ALA_";
            String parentPath = dir.getParent().substring(dir.getParent().lastIndexOf("/") + 1);
            String zipfile = dir.getParent() + "/" + pid + ".zip";
            if ("maxent".equals(parentPath)) {
                fixMaxentFiles(pid, dir);
                Zipper.zipDirectory(dir.getParent() + "/temp/" + pid, zipfile);
            } else if ("layers".equals(parentPath) || "aloc".equals(parentPath)) {
                fixAlocFiles(pid, dir);
                Zipper.zipDirectory(dir.getParent() + "/temp/" + pid, zipfile);
            } else {
                Zipper.zipDirectory(dir.getAbsolutePath(), zipfile);
            }
            System.out.println(
                    "Found " + dir.getName() + " in " + dir.getParent() + " and zipped at: " + zipfile);
            //return "Found " + dir.getName() + " in " + dir.getParent() + " and zipped at: " + zipfile;

            if ("maxent".equals(parentPath)) {
                parentName = "ALA_Prediction_";
            } else if ("sampling".equals(parentPath)) {
                parentName = "ALA_Species_Samples_";
            } else if ("layers".equals(parentPath) || "aloc".equals(parentPath)) {
                parentName = "ALA_Classification_";
            } else if ("gdm".equals(parentPath)) {
                parentName = "ALA_GDM_";
            } else if ("filtering".equals(parentPath)) {
                parentName = "ALA_EnvFilter_";
            } else if ("sitesbyspecies".equals(parentPath)) {
                parentName = "ALA_SitesBySpecies_";
            }

            File file = new File(zipfile);
            System.out.println("File generated: " + file.getAbsolutePath());
        } else {
            System.out.println("Could not find session data");
            //return "Could not find session data";
        }
    } catch (Exception e) {
        e.printStackTrace(System.out);
    }
}
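The example above derives the parent directory's name with dir.getParent().substring(dir.getParent().lastIndexOf("/") + 1), which assumes "/" separators. A sketch of an equivalent, separator-independent approach using getParentFile().getName(); the session path below is hypothetical:

import java.io.File;

public class ParentNameDemo {
    public static void main(String[] args) {
        // Hypothetical session directory layout, mirroring the example above
        File dir = new File("/data/ala/output/aloc/1323844048457");

        File parent = dir.getParentFile();
        // getName() on the parent yields "aloc" directly, with no substring arithmetic
        String parentPath = (parent == null) ? "" : parent.getName();

        String zipfile = dir.getParent() + "/" + dir.getName() + ".zip";
        System.out.println("parent type: " + parentPath);
        System.out.println("would zip to: " + zipfile);
    }
}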