List of usage examples for java.io.File.getName()
public String getName()
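getName() returns the last name in the pathname's name sequence, i.e. the file or directory name without any parent directories. It inspects the path string only and does not require the file to exist. A minimal self-contained sketch (hypothetical paths, not taken from the examples below):

import java.io.File;

public class GetNameDemo {
    public static void main(String[] args) {
        // the last path segment, whether or not the file actually exists
        File file = new File("/var/log/app/server.log");   // hypothetical path
        System.out.println(file.getName());                // prints "server.log"

        // trailing separators are normalized away by the File constructor
        File dir = new File("/var/log/app/");
        System.out.println(dir.getName());                 // prints "app"
    }
}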
From source file:mil.tatrc.physiology.utilities.csv.plots.CSVPlotTool.java
public static void main(String[] args) {
    if (args.length < 1) {
        Log.fatal("Expected inputs : [results file path]");
        return;
    }
    File f = new File(args[0]);
    if (!f.exists()) {
        Log.fatal("Input file cannot be found");
        return;
    }
    String reportDir = "./graph_results/" + f.getName();
    reportDir = reportDir.substring(0, reportDir.lastIndexOf(".")) + "/";
    CSVPlotTool t = new CSVPlotTool();
    t.graphResults(args[0], reportDir);
}
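A caveat on the extension-stripping idiom above: lastIndexOf(".") is applied to the whole reportDir string, so for an input name without a dot the match falls back to the leading dot of "./graph_results/" and the report directory collapses to "/". A defensive variant (hypothetical helper, not part of CSVPlotTool) would strip the extension from getName() itself:

// Strip the extension from a file's name, tolerating names with no dot (hypothetical helper)
static String baseName(File f) {
    String name = f.getName();            // last path segment only
    int dot = name.lastIndexOf('.');
    return (dot < 0) ? name : name.substring(0, dot);
}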
From source file:com.mvdb.etl.actions.ExtractDBChanges.java
public static void main(String[] args) throws JSONException {
    ActionUtils.setUpInitFileProperty();
    // boolean success = ActionUtils.markActionChainBroken("Just Testing");
    // System.exit(success ? 0 : 1);
    ActionUtils.assertActionChainNotBroken();
    ActionUtils.assertEnvironmentSetupOk();
    ActionUtils.assertFileExists("~/.mvdb", "~/.mvdb missing. Exiting.");
    ActionUtils.assertFileExists("~/.mvdb/status.InitCustomerData.complete",
            "300init-customer-data.sh not executed yet. Exiting");
    // This check is not required as data can be modified any number of times
    // ActionUtils.assertFileDoesNotExist("~/.mvdb/status.ModifyCustomerData.complete", "ModifyCustomerData already done. Start with 100init.sh if required. Exiting");
    ActionUtils.createMarkerFile("~/.mvdb/status.ExtractDBChanges.start", true);
    // String schemaDescription = "{ 'root' : [{'table' : 'orders', 'keyColumn' : 'order_id', 'updateTimeColumn' : 'update_time'}]}";
    String customerName = null;
    final CommandLineParser cmdLinePosixParser = new PosixParser();
    final Options posixOptions = constructPosixOptions();
    CommandLine commandLine;
    try {
        commandLine = cmdLinePosixParser.parse(posixOptions, args);
        if (commandLine.hasOption("customer")) {
            customerName = commandLine.getOptionValue("customer");
        }
    } catch (ParseException parseException) { // checked exception
        System.err.println("Encountered exception while parsing using PosixParser:\n"
                + parseException.getMessage());
    }
    if (customerName == null) {
        System.err.println("Could not find customerName. Aborting...");
        System.exit(1);
    }
    ApplicationContext context = Top.getContext();
    final OrderDAO orderDAO = (OrderDAO) context.getBean("orderDAO");
    final ConfigurationDAO configurationDAO = (ConfigurationDAO) context.getBean("configurationDAO");
    final GenericDAO genericDAO = (GenericDAO) context.getBean("genericDAO");
    File snapshotDirectory = getSnapshotDirectory(configurationDAO, customerName);
    try {
        // record the snapshot directory name (the last path segment) for later steps
        FileUtils.writeStringToFile(new File("/tmp/etl.extractdbchanges.directory.txt"),
                snapshotDirectory.getName(), false);
    } catch (IOException e) {
        e.printStackTrace();
        System.exit(1);
        return;
    }
    long currentTime = new Date().getTime();
    Configuration lastRefreshTimeConf = configurationDAO.find(customerName, "last-refresh-time");
    Configuration schemaDescriptionConf = configurationDAO.find(customerName, "schema-description");
    long lastRefreshTime = Long.parseLong(lastRefreshTimeConf.getValue());
    OrderJsonFileConsumer orderJsonFileConsumer = new OrderJsonFileConsumer(snapshotDirectory);
    Map<String, ColumnMetadata> metadataMap = orderDAO.findMetadata();
    // write file schema-orders.dat in snapshotDirectory
    genericDAO.fetchMetadata("orders", snapshotDirectory);
    // writes files: header-orders.dat, data-orders.dat in snapshotDirectory
    JSONObject json = new JSONObject(schemaDescriptionConf.getValue());
    JSONArray rootArray = json.getJSONArray("root");
    int length = rootArray.length();
    for (int i = 0; i < length; i++) {
        JSONObject jsonObject = rootArray.getJSONObject(i);
        String table = jsonObject.getString("table");
        String keyColumnName = jsonObject.getString("keyColumn");
        String updateTimeColumnName = jsonObject.getString("updateTimeColumn");
        System.out.println("table:" + table + ", keyColumn: " + keyColumnName
                + ", updateTimeColumn: " + updateTimeColumnName);
        genericDAO.fetchAll2(snapshotDirectory, new Timestamp(lastRefreshTime), table,
                keyColumnName, updateTimeColumnName);
    }
    // Unlikely failure, but this needs to be factored into a separate task so that
    // extraction does not have to be repeated. Extraction is an expensive task.
    try {
        String sourceDirectoryAbsolutePath = snapshotDirectory.getAbsolutePath();
        File sourceRelativeDirectoryPath = getRelativeSnapShotDirectory(configurationDAO,
                sourceDirectoryAbsolutePath);
        String hdfsRoot = ActionUtils.getConfigurationValue(ConfigurationKeys.GLOBAL_CUSTOMER,
                ConfigurationKeys.GLOBAL_HDFS_ROOT);
        String targetDirectoryFullPath = hdfsRoot + "/data" + sourceRelativeDirectoryPath;
        ActionUtils.copyLocalDirectoryToHdfsDirectory(sourceDirectoryAbsolutePath, targetDirectoryFullPath);
        String dirName = snapshotDirectory.getName();
        ActionUtils.setConfigurationValue(customerName, ConfigurationKeys.LAST_COPY_TO_HDFS_DIRNAME, dirName);
    } catch (Throwable e) {
        e.printStackTrace();
        logger.error("Objects extracted from database, but copy of snapshot directory <"
                + snapshotDirectory.getAbsolutePath() + "> to hdfs <" + "" + "> failed. "
                + "Fix the problem and redo extract.", e);
        System.exit(1);
    }
    // Unlikely failure, but this also needs to be factored into a separate task so that
    // extraction does not have to be repeated. Extraction is an expensive task.
    String targetZip = null;
    try {
        File targetZipDirectory = new File(snapshotDirectory.getParent(), "archives");
        if (!targetZipDirectory.exists()) {
            boolean success = targetZipDirectory.mkdirs();
            if (!success) {
                logger.error("Objects copied to hdfs, but unable to create archive directory <"
                        + targetZipDirectory.getAbsolutePath() + ">. Fix the problem and redo extract.");
                System.exit(1);
            }
        }
        targetZip = new File(targetZipDirectory, snapshotDirectory.getName() + ".zip").getAbsolutePath();
        ActionUtils.zipFullDirectory(snapshotDirectory.getAbsolutePath(), targetZip);
    } catch (Throwable e) {
        e.printStackTrace();
        logger.error("Objects copied to hdfs, but zipping of snapshot directory <"
                + snapshotDirectory.getAbsolutePath() + "> to <" + targetZip + "> failed. "
                + "Fix the problem and redo extract.", e);
        System.exit(1);
    }
    // orderDAO.findAll(new Timestamp(lastRefreshTime), orderJsonFileConsumer);
    Configuration updateRefreshTimeConf = new Configuration(customerName, "last-refresh-time",
            String.valueOf(currentTime));
    configurationDAO.update(updateRefreshTimeConf, String.valueOf(lastRefreshTimeConf.getValue()));
    ActionUtils.createMarkerFile("~/.mvdb/status.ExtractDBChanges.complete", true);
}
From source file:bookChapter.theoretical.AnalyzeTheoreticalMSMSCalculation.java
/**
 * @param args
 * @throws IOException
 * @throws FileNotFoundException
 * @throws ClassNotFoundException
 * @throws InterruptedException
 * @throws MzMLUnmarshallerException
 */
public static void main(String[] args) throws IOException, FileNotFoundException, ClassNotFoundException,
        InterruptedException, MzMLUnmarshallerException {
    Logger l = Logger.getLogger("AnalyzeTheoreticalMSMSCalculation");
    Date date = Calendar.getInstance().getTime();
    DateFormat formatter = new SimpleDateFormat("EEEE, dd MMMM yyyy, hh:mm:ss.SSS a");
    String now = formatter.format(date);
    l.log(Level.INFO, "Calculation starts at {0}", now);
    double precursorTolerance = ConfigHolder.getInstance().getDouble("precursor.tolerance"),
            fragmentTolerance = ConfigHolder.getInstance().getDouble("fragment.tolerance");
    String databaseName = ConfigHolder.getInstance().getString("database.name"),
            spectraName = ConfigHolder.getInstance().getString("spectra.name"),
            output = ConfigHolder.getInstance().getString("output");
    int correctionFactor = ConfigHolder.getInstance().getInt("correctionFactor");
    boolean theoFromAllCharges = ConfigHolder.getInstance().getBoolean("hasAllPossCharge");
    BufferedWriter bw = new BufferedWriter(new FileWriter(output));
    bw.write("SpectrumTitle" + "\t" + "PrecursorMZ" + "\t" + "PrecursorCharge" + "\t"
            + "Observed Mass (M+H)" + "\t" + "AndromedaLikeScore" + "\t" + "SequestLikeScore" + "\t"
            + "PeptideByAndromedaLikeScore" + "\t" + "PeptideBySequestLikeScore" + "\t"
            + "LevenshteinDistance" + "\t" + "TotalScoredPeps" + "\t" + "isCorrectMatchByAndromedaLike"
            + "\t" + "isCorrectMatchBySequestLikeScore" + "\n");
    l.info("Getting database entries");
    // first load all sequences into the memory
    HashSet<DBEntry> dbEntries = getDBEntries(databaseName);
    // for every spectrum, calculate both scores...
    // now convert to binExperimental spectrum
    int num = 0;
    SpectrumFactory fct = SpectrumFactory.getInstance();
    File f = new File(spectraName);
    if (spectraName.endsWith(".mgf")) {
        fct.addSpectra(f, new WaitingHandlerCLIImpl());
        l.log(Level.INFO, "Spectra scoring starts at {0}", now);
        // f.getName() is the key under which SpectrumFactory indexes the loaded spectra
        for (String title : fct.getSpectrumTitles(f.getName())) {
            num++;
            MSnSpectrum ms = (MSnSpectrum) fct.getSpectrum(f.getName(), title);
            // calculate all, except when this is an empty spectrum...
            if (ms.getPeakList().size() > 2) {
                // to check a spectrum with negative values..
                String text = result(ms, precursorTolerance, dbEntries, fragmentTolerance,
                        correctionFactor, theoFromAllCharges);
                if (!text.isEmpty()) {
                    bw.write(text);
                }
            }
            if (num % 500 == 0) {
                l.info("Running " + num + " spectra. " + Calendar.getInstance().getTime());
            }
        }
    }
    l.info("Program finished at " + Calendar.getInstance().getTime());
    bw.close();
}
From source file:apps.quantification.LearnQuantificationSVMPerf.java
public static void main(String[] args) throws IOException {
    String cmdLineSyntax = LearnQuantificationSVMPerf.class.getName()
            + " [OPTIONS] <path to svm_perf_learn> <path to svm_perf_classify> <trainingIndexDirectory> <outputDirectory>";
    Options options = new Options();

    OptionBuilder.withArgName("f");
    OptionBuilder.withDescription("Number of folds");
    OptionBuilder.withLongOpt("f");
    OptionBuilder.isRequired(true);
    OptionBuilder.hasArg();
    options.addOption(OptionBuilder.create());

    OptionBuilder.withArgName("c");
    OptionBuilder.withDescription("The c value for svm_perf (default 0.01)");
    OptionBuilder.withLongOpt("c");
    OptionBuilder.isRequired(false);
    OptionBuilder.hasArg();
    options.addOption(OptionBuilder.create());

    OptionBuilder.withArgName("t");
    OptionBuilder.withDescription("Path for temporary files");
    OptionBuilder.withLongOpt("t");
    OptionBuilder.isRequired(false);
    OptionBuilder.hasArg();
    options.addOption(OptionBuilder.create());

    OptionBuilder.withArgName("l");
    OptionBuilder.withDescription("The loss function to optimize (default 2):\n"
            + " 0 Zero/one loss: 1 if vector of predictions contains error, 0 otherwise.\n"
            + " 1 F1: 100 minus the F1-score in percent.\n"
            + " 2 Errorrate: Percentage of errors in prediction vector.\n"
            + " 3 Prec/Rec Breakeven: 100 minus PRBEP in percent.\n"
            + " 4 Prec@p: 100 minus precision at p in percent.\n"
            + " 5 Rec@p: 100 minus recall at p in percent.\n"
            + " 10 ROCArea: Percentage of swapped pos/neg pairs (i.e. 100 - ROCArea).");
    OptionBuilder.withLongOpt("l");
    OptionBuilder.isRequired(false);
    OptionBuilder.hasArg();
    options.addOption(OptionBuilder.create());

    OptionBuilder.withArgName("w");
    OptionBuilder.withDescription("Choice of structural learning algorithm (default 9):\n"
            + " 0: n-slack algorithm described in [2]\n"
            + " 1: n-slack algorithm with shrinking heuristic\n"
            + " 2: 1-slack algorithm (primal) described in [5]\n"
            + " 3: 1-slack algorithm (dual) described in [5]\n"
            + " 4: 1-slack algorithm (dual) with constraint cache [5]\n"
            + " 9: custom algorithm in svm_struct_learn_custom.c");
    OptionBuilder.withLongOpt("w");
    OptionBuilder.isRequired(false);
    OptionBuilder.hasArg();
    options.addOption(OptionBuilder.create());

    OptionBuilder.withArgName("p");
    OptionBuilder.withDescription("The value of p used by the prec@p and rec@p loss functions (default 0)");
    OptionBuilder.withLongOpt("p");
    OptionBuilder.isRequired(false);
    OptionBuilder.hasArg();
    options.addOption(OptionBuilder.create());

    OptionBuilder.withArgName("v");
    OptionBuilder.withDescription("Verbose output");
    OptionBuilder.withLongOpt("v");
    OptionBuilder.isRequired(false);
    OptionBuilder.hasArg(false);
    options.addOption(OptionBuilder.create());

    OptionBuilder.withArgName("s");
    OptionBuilder.withDescription("Don't delete temporary training file in svm_perf format (default: delete)");
    OptionBuilder.withLongOpt("s");
    OptionBuilder.isRequired(false);
    OptionBuilder.hasArg(false);
    options.addOption(OptionBuilder.create());

    SvmPerfLearnerCustomizer classificationLearnerCustomizer = null;
    SvmPerfClassifierCustomizer classificationCustomizer = null;
    int folds = -1;
    GnuParser parser = new GnuParser();
    String[] remainingArgs = null;
    try {
        CommandLine line = parser.parse(options, args);
        remainingArgs = line.getArgs();
        classificationLearnerCustomizer = new SvmPerfLearnerCustomizer(remainingArgs[0]);
        classificationCustomizer = new SvmPerfClassifierCustomizer(remainingArgs[1]);
        folds = Integer.parseInt(line.getOptionValue("f"));
        if (line.hasOption("c"))
            classificationLearnerCustomizer.setC(Float.parseFloat(line.getOptionValue("c")));
        if (line.hasOption("w"))
            classificationLearnerCustomizer.setW(Integer.parseInt(line.getOptionValue("w")));
        if (line.hasOption("p"))
            classificationLearnerCustomizer.setP(Integer.parseInt(line.getOptionValue("p")));
        if (line.hasOption("l"))
            classificationLearnerCustomizer.setL(Integer.parseInt(line.getOptionValue("l")));
        if (line.hasOption("v"))
            classificationLearnerCustomizer.printSvmPerfOutput(true);
        if (line.hasOption("s"))
            classificationLearnerCustomizer.setDeleteTrainingFiles(false);
        if (line.hasOption("t")) {
            classificationLearnerCustomizer.setTempPath(line.getOptionValue("t"));
            classificationCustomizer.setTempPath(line.getOptionValue("t"));
        }
    } catch (Exception exp) {
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(cmdLineSyntax, options);
        System.exit(-1);
    }
    assert (classificationLearnerCustomizer != null);

    if (remainingArgs.length != 4) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(cmdLineSyntax, options);
        System.exit(-1);
    }

    String indexFile = remainingArgs[2];
    File file = new File(indexFile);
    String indexName = file.getName();
    String indexPath = file.getParent();
    String outputPath = remainingArgs[3];
    SvmPerfLearner classificationLearner = new SvmPerfLearner();
    classificationLearner.setRuntimeCustomizer(classificationLearnerCustomizer);
    FileSystemStorageManager fssm = new FileSystemStorageManager(indexPath, false);
    fssm.open();
    IIndex training = TroveReadWriteHelper.readIndex(fssm, indexName, TroveContentDBType.Full,
            TroveClassificationDBType.Full);
    final TextualProgressBar progressBar = new TextualProgressBar("Learning the quantifiers");
    IOperationStatusListener status = new IOperationStatusListener() {
        @Override
        public void operationStatus(double percentage) {
            progressBar.signal((int) percentage);
        }
    };
    QuantificationLearner quantificationLearner = new QuantificationLearner(folds, classificationLearner,
            classificationLearnerCustomizer, classificationCustomizer, ClassificationMode.PER_CATEGORY,
            new LogisticFunction(), status);
    IQuantifier[] quantifiers = quantificationLearner.learn(training);
    File executableFile = new File(classificationLearnerCustomizer.getSvmPerfLearnPath());
    IDataManager classifierDataManager = new SvmPerfDataManager(new SvmPerfClassifierCustomizer(
            executableFile.getParentFile().getAbsolutePath() + Os.pathSeparator() + "svm_perf_classify"));
    String description = "_SVMPerf_C-" + classificationLearnerCustomizer.getC() + "_W-"
            + classificationLearnerCustomizer.getW() + "_L-" + classificationLearnerCustomizer.getL();
    if (classificationLearnerCustomizer.getL() == 4 || classificationLearnerCustomizer.getL() == 5)
        description += "_P-" + classificationLearnerCustomizer.getP();
    if (classificationLearnerCustomizer.getAdditionalParameters().length() > 0)
        description += "_" + classificationLearnerCustomizer.getAdditionalParameters();
    String quantifierPrefix = indexName + "_Quantifier-" + folds + description;
    FileSystemStorageManager fssmo = new FileSystemStorageManager(
            outputPath + File.separatorChar + quantifierPrefix, true);
    fssmo.open();
    QuantificationLearner.write(quantifiers, fssmo, classifierDataManager);
    fssmo.close();

    BufferedWriter bfs = new BufferedWriter(
            new FileWriter(outputPath + File.separatorChar + quantifierPrefix + "_rates.txt"));
    TShortDoubleHashMap simpleTPRs = quantificationLearner.getSimpleTPRs();
    TShortDoubleHashMap simpleFPRs = quantificationLearner.getSimpleFPRs();
    TShortDoubleHashMap scaledTPRs = quantificationLearner.getScaledTPRs();
    TShortDoubleHashMap scaledFPRs = quantificationLearner.getScaledFPRs();
    ContingencyTableSet contingencyTableSet = quantificationLearner.getContingencyTableSet();

    short[] cats = simpleTPRs.keys();
    for (int i = 0; i < cats.length; ++i) {
        short cat = cats[i];
        String catName = training.getCategoryDB().getCategoryName(cat);
        ContingencyTable contingencyTable = contingencyTableSet.getCategoryContingencyTable(cat);
        double simpleTPR = simpleTPRs.get(cat);
        double simpleFPR = simpleFPRs.get(cat);
        double scaledTPR = scaledTPRs.get(cat);
        double scaledFPR = scaledFPRs.get(cat);
        String line = quantifierPrefix + "\ttrain\tsimple\t" + catName + "\t" + cat + "\t"
                + contingencyTable.tp() + "\t" + contingencyTable.fp() + "\t" + contingencyTable.fn() + "\t"
                + contingencyTable.tn() + "\t" + simpleTPR + "\t" + simpleFPR + "\n";
        bfs.write(line);
        line = quantifierPrefix + "\ttrain\tscaled\t" + catName + "\t" + cat + "\t"
                + contingencyTable.tp() + "\t" + contingencyTable.fp() + "\t" + contingencyTable.fn() + "\t"
                + contingencyTable.tn() + "\t" + scaledTPR + "\t" + scaledFPR + "\n";
        bfs.write(line);
    }
    bfs.close();
}
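The indexFile handling above shows a common pairing: getName() yields the last segment of a path and getParent() the remainder, splitting one argument into an index name and its directory. A minimal sketch (hypothetical path):

import java.io.File;

public class SplitPathDemo {
    public static void main(String[] args) {
        File file = new File("/data/indexes/reuters21578");  // hypothetical index path
        System.out.println(file.getName());    // "reuters21578"
        System.out.println(file.getParent());  // "/data/indexes"
    }
}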
From source file:dependencies.DependencyResolving.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    // TODO code application logic here
    JSONParser parser = new JSONParser(); // we use JSONParser in order to be able to read from the JSON file
    try {
        // here we declare the file reader and define the path to the file dependencies.json
        Object obj = parser.parse(new FileReader(
                "C:\\Users\\Vladimir\\Documents\\NetBeansProjects\\DependenciesResolving\\src\\dependencies\\dependencies.json"));
        JSONObject project = (JSONObject) obj; // a JSON object containing all the data in the .json file
        JSONArray dependencies = (JSONArray) project.get("dependencies"); // get the array of objects with key "dependencies"
        System.out.print("We need to install the following dependencies: ");
        Iterator<String> iterator = dependencies.iterator(); // define an iterator over the array "dependencies"
        while (iterator.hasNext()) {
            System.out.println(iterator.next());
        }
        // on the next line we declare another object, which parses and reads from all_packages.json
        Object obj2 = parser.parse(new FileReader(
                "C:\\Users\\Vladimir\\Documents\\NetBeansProjects\\DependenciesResolving\\src\\dependencies\\all_packages.json"));
        JSONObject tools = (JSONObject) obj2; // a JSON object containing all the data in the file all_packages.json
        for (int i = 0; i < dependencies.size(); i++) {
            if (tools.containsKey(dependencies.get(i))) {
                System.out.println("In order to install " + dependencies.get(i)
                        + ", we need the following programs:");
                // a temporary JSON array in which we store the keys and values of the dependencies
                JSONArray temporaryArray = (JSONArray) tools.get(dependencies.get(i));
                // inner loops use their own index j so they do not clobber the outer index i
                for (int j = 0; j < temporaryArray.size(); j++) {
                    System.out.println(temporaryArray.get(j));
                }
                // an array in which we store the keys of the objects, after we use the values and won't need them anymore
                ArrayList<Object> arraysOfJsonData = new ArrayList<Object>();
                for (int j = 0; j < temporaryArray.size(); j++) {
                    System.out.println("Installing " + temporaryArray.get(j));
                }
                while (!temporaryArray.isEmpty()) {
                    for (Object element : temporaryArray) {
                        if (tools.containsKey(element)) {
                            // a temporary array within the scope of the if-statement
                            JSONArray secondaryArray = (JSONArray) tools.get(element);
                            if (secondaryArray.size() != 0) {
                                System.out.println("In order to install " + element + ", we need ");
                            }
                            for (int j = 0; j < secondaryArray.size(); j++) {
                                System.out.println(secondaryArray.get(j));
                            }
                            for (Object o : secondaryArray) {
                                arraysOfJsonData.add(o);
                                // here we create a file marking the installed dependency
                                File file = new File(
                                        "C:\\Users\\Vladimir\\Documents\\NetBeansProjects\\DependenciesResolving\\src\\dependencies\\installed_modules\\"
                                                + o);
                                if (file.createNewFile()) {
                                    System.out.println(file.getName() + " is installed!");
                                }
                            }
                            secondaryArray.clear();
                        }
                    }
                    temporaryArray.clear();
                    for (int j = 0; j < arraysOfJsonData.size(); j++) {
                        temporaryArray.add(arraysOfJsonData.get(j));
                    }
                    arraysOfJsonData.clear();
                }
            }
        }
        Set<String> keys = tools.keySet(); // here we define a set of the keys of the objects in all_packages.json
        for (String s : keys) {
            File file = new File(
                    "C:\\Users\\Vladimir\\Documents\\NetBeansProjects\\DependenciesResolving\\src\\dependencies\\installed_modules\\"
                            + s);
            if (file.createNewFile()) {
                System.out.println(file.getName() + " is installed.");
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(DependencyResolving.class.getName()).log(Level.SEVERE, null, ex);
    } catch (ParseException ex) {
        Logger.getLogger(DependencyResolving.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:com.p2p.peercds.cli.TorrentMain.java
/**
 * Torrent reader and creator.
 *
 * <p>
 * You can use the {@code main()} function of this class to read or create
 * torrent files. See usage for details.
 * </p>
 */
public static void main(String[] args) {
    BasicConfigurator.configure(new ConsoleAppender(new PatternLayout("%-5p: %m%n")));

    CmdLineParser parser = new CmdLineParser();
    CmdLineParser.Option help = parser.addBooleanOption('h', "help");
    CmdLineParser.Option filename = parser.addStringOption('t', "torrent");
    CmdLineParser.Option create = parser.addBooleanOption('c', "create");
    CmdLineParser.Option announce = parser.addStringOption('a', "announce");

    try {
        parser.parse(args);
    } catch (CmdLineParser.OptionException oe) {
        System.err.println(oe.getMessage());
        usage(System.err);
        System.exit(1);
    }

    // Display help and exit if requested
    if (Boolean.TRUE.equals((Boolean) parser.getOptionValue(help))) {
        usage(System.out);
        System.exit(0);
    }

    String filenameValue = (String) parser.getOptionValue(filename);
    if (filenameValue == null) {
        usage(System.err, "Torrent file must be provided!");
        System.exit(1);
    }

    Boolean createFlag = (Boolean) parser.getOptionValue(create);

    // For repeated announce urls
    @SuppressWarnings("unchecked")
    Vector<String> announceURLs = (Vector<String>) parser.getOptionValues(announce);

    String[] otherArgs = parser.getRemainingArgs();

    if (Boolean.TRUE.equals(createFlag) && (otherArgs.length != 1 || announceURLs.isEmpty())) {
        usage(System.err, "Announce URL and a file or directory must be "
                + "provided to create a torrent file!");
        System.exit(1);
    }

    OutputStream fos = null;
    try {
        if (Boolean.TRUE.equals(createFlag)) {
            if (filenameValue != null) {
                fos = new FileOutputStream(filenameValue);
            } else {
                fos = System.out;
            }

            // Process the announce URLs into URIs
            List<URI> announceURIs = new ArrayList<URI>();
            for (String url : announceURLs) {
                announceURIs.add(new URI(url));
            }

            // Create the announce-list as a list of lists of URIs
            // Assume all the URIs are first tier trackers
            List<List<URI>> announceList = new ArrayList<List<URI>>();
            announceList.add(announceURIs);

            File source = new File(otherArgs[0]);
            if (!source.exists() || !source.canRead()) {
                throw new IllegalArgumentException(
                        "Cannot access source file or directory " + source.getName());
            }

            String creator = String.format("%s (ttorrent)", System.getProperty("user.name"));

            Torrent torrent = null;
            if (source.isDirectory()) {
                File[] files = source.listFiles(Constants.hiddenFilesFilter);
                Arrays.sort(files);
                torrent = Torrent.create(source, Arrays.asList(files), announceList, creator);
            } else {
                torrent = Torrent.create(source, announceList, creator);
            }
            torrent.save(fos);
        } else {
            Torrent.load(new File(filenameValue), true);
        }
    } catch (Exception e) {
        logger.error("{}", e.getMessage(), e);
        System.exit(2);
    } finally {
        if (fos != System.out) {
            IOUtils.closeQuietly(fos);
        }
    }
}
From source file:cmd.freebase2rdf.java
public static void main(String[] args) throws Exception {
    if (args.length != 2) {
        usage();
    }
    File input = new File(args[0]);
    if (!input.exists())
        error("File " + input.getAbsolutePath() + " does not exist.");
    if (!input.canRead())
        error("Cannot read file " + input.getAbsolutePath());
    if (!input.isFile())
        error("Not a file " + input.getAbsolutePath());
    File output = new File(args[1]);
    if (output.exists())
        error("Output file " + output.getAbsolutePath()
                + " already exists, this program does not overwrite existing files.");
    // check the enclosing directory, since canWrite() is always false for a file that does not exist yet
    if (!output.getAbsoluteFile().getParentFile().canWrite())
        error("Cannot write file " + output.getAbsolutePath());
    if (output.isDirectory())
        error("Not a file " + output.getAbsolutePath());
    if (!output.getName().endsWith(".nt.gz"))
        error("Output filename should end with .nt.gz, this is the only format supported.");

    BufferedReader in = new BufferedReader(
            new InputStreamReader(new BZip2CompressorInputStream(new FileInputStream(input))));
    BufferedOutputStream out = new BufferedOutputStream(new GZIPOutputStream(new FileOutputStream(output)));
    String line;
    ProgressLogger progressLogger = new ProgressLogger(log, "lines", 100000, 1000000);
    progressLogger.start();
    Freebase2RDF freebase2rdf = null;
    try {
        freebase2rdf = new Freebase2RDF(out);
        while ((line = in.readLine()) != null) {
            freebase2rdf.send(line);
            progressLogger.tick();
        }
    } finally {
        if (freebase2rdf != null)
            freebase2rdf.close();
    }
    print(log, progressLogger);
}
From source file:com.bluexml.tools.miscellaneous.Translate.java
/**
 * @param args
 */
public static void main(String[] args) {
    System.out.println("Translate.main() 1");
    Console console = System.console();
    System.out.println("give path to folder that contains properties files");
    Scanner scanIn = new Scanner(System.in);
    try {
        // TODO Auto-generated method stub
        String sWhatever;
        System.out.println("Translate.main() 2");
        sWhatever = scanIn.nextLine();
        System.out.println("Translate.main() 3");
        System.out.println(sWhatever);
        File inDir = new File(sWhatever);
        FilenameFilter filter = new FilenameFilter() {
            public boolean accept(File arg0, String arg1) {
                return arg1.endsWith("properties");
            }
        };
        File[] listFiles = inDir.listFiles(filter);
        for (File file : listFiles) {
            prapareFileToTranslate(file, inDir);
        }
        System.out.println("please translate text files and press enter");
        String readLine = scanIn.nextLine();
        System.out.println("Translate.main() 4");
        for (File file : listFiles) {
            // the editable text file sits next to the original, named <original name>.txt
            File values = new File(file.getParentFile(), file.getName() + ".txt");
            writeBackValues(values, file);
            values.delete();
        }
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } finally {
        scanIn.close();
    }
}
From source file:Pathway2RDFv2.java
public static void main(String[] args) throws ParserConfigurationException, SAXException, IOException,
        ServiceException, ClassNotFoundException, IDMapperException, ParseException {
    int softwareVersion = 0;
    int schemaVersion = 0;
    int latestRevision = 0;
    BioDataSource.init();
    Class.forName("org.bridgedb.rdb.IDMapperRdb");
    File dir = new File("/Users/andra/Downloads/bridge");
    File[] bridgeDbFiles = dir.listFiles();
    IDMapperStack mapper = new IDMapperStack();
    for (File bridgeDbFile : bridgeDbFiles) {
        System.out.println(bridgeDbFile.getAbsolutePath());
        mapper.addIDMapper("idmapper-pgdb:" + bridgeDbFile.getAbsolutePath());
    }
    Model bridgeDbmodel = ModelFactory.createDefaultModel();
    InputStream in = new FileInputStream("/tmp/BioDataSource.ttl");
    bridgeDbmodel.read(in, "", "TURTLE");

    WikiPathwaysClient client = new WikiPathwaysClient(
            new URL("http://www.wikipathways.org/wpi/webservice/webservice.php"));
    basicCalls.printMemoryStatus();

    // Map wikipathway organisms to NCBI organisms
    HashMap<String, String> organismTaxonomy = wpRelatedCalls.getOrganismsTaxonomyMapping();
    // HashMap<String, String> miriamSources = new HashMap<String, String>();
    // HashMap<String, String> miriamLinks = basicCalls.getMiriamUriBridgeDb();

    // Document wikiPathwaysDom = basicCalls.openXmlFile(args[0]);
    Document wikiPathwaysDom = basicCalls.openXmlFile("/tmp/WpGPML.xml");

    // initiate the Jena model to be populated
    Model model = ModelFactory.createDefaultModel();
    Model voidModel = ModelFactory.createDefaultModel();
    voidModel.setNsPrefix("xsd", XSD.getURI());
    voidModel.setNsPrefix("void", Void.getURI());
    voidModel.setNsPrefix("wprdf", "http://rdf.wikipathways.org/");
    voidModel.setNsPrefix("pav", Pav.getURI());
    voidModel.setNsPrefix("prov", Prov.getURI());
    voidModel.setNsPrefix("dcterms", DCTerms.getURI());
    voidModel.setNsPrefix("biopax", Biopax_level3.getURI());
    voidModel.setNsPrefix("gpml", Gpml.getURI());
    voidModel.setNsPrefix("wp", Wp.getURI());
    voidModel.setNsPrefix("foaf", FOAF.getURI());
    voidModel.setNsPrefix("hmdb", "http://identifiers.org/hmdb/");
    voidModel.setNsPrefix("freq", Freq.getURI());
    voidModel.setNsPrefix("dc", DC.getURI());
    setModelPrefix(model);

    // Populate void.ttl
    Calendar now = Calendar.getInstance();
    Literal nowLiteral = voidModel.createTypedLiteral(now);
    Literal titleLiteral = voidModel.createLiteral("WikiPathways-RDF VoID Description", "en");
    Literal descriptionLiteral = voidModel
            .createLiteral("This is the VoID description for a WikiPathways-RDF dataset.", "en");
    Resource voidBase = voidModel.createResource("http://rdf.wikipathways.org/");
    Resource identifiersOrg = voidModel.createResource("http://identifiers.org");
    Resource wpHomeBase = voidModel.createResource("http://www.wikipathways.org/");
    Resource authorResource = voidModel
            .createResource("http://semantics.bigcat.unimaas.nl/figshare/search_author.php?author=waagmeester");
    Resource apiResource = voidModel
            .createResource("http://www.wikipathways.org/wpi/webservice/webservice.php");
    Resource mainDatadump = voidModel.createResource("http://rdf.wikipathways.org/wpContent.ttl.gz");
    Resource license = voidModel.createResource("http://creativecommons.org/licenses/by/3.0/");
    Resource instituteResource = voidModel.createResource("http://dbpedia.org/page/Maastricht_University");
    voidBase.addProperty(RDF.type, Void.Dataset);
    voidBase.addProperty(DCTerms.title, titleLiteral);
    voidBase.addProperty(DCTerms.description, descriptionLiteral);
    voidBase.addProperty(FOAF.homepage, wpHomeBase);
    voidBase.addProperty(DCTerms.license, license);
    voidBase.addProperty(Void.uriSpace, voidBase);
    voidBase.addProperty(Void.uriSpace, identifiersOrg);
    voidBase.addProperty(Pav.importedBy, authorResource);
    voidBase.addProperty(Pav.importedFrom, apiResource);
    voidBase.addProperty(Pav.importedOn, nowLiteral);
    voidBase.addProperty(Void.dataDump, mainDatadump);
    voidBase.addProperty(Voag.frequencyOfChange, Freq.Irregular);
    voidBase.addProperty(Pav.createdBy, authorResource);
    voidBase.addProperty(Pav.createdAt, instituteResource);
    voidBase.addLiteral(Pav.createdOn, nowLiteral);
    voidBase.addProperty(DCTerms.subject, Biopax_level3.Pathway);
    voidBase.addProperty(Void.exampleResource,
            voidModel.createResource("http://identifiers.org/ncbigene/2678"));
    voidBase.addProperty(Void.exampleResource,
            voidModel.createResource("http://identifiers.org/pubmed/15215856"));
    voidBase.addProperty(Void.exampleResource,
            voidModel.createResource("http://identifiers.org/hmdb/HMDB02005"));
    voidBase.addProperty(Void.exampleResource,
            voidModel.createResource("http://rdf.wikipathways.org/WP15"));
    voidBase.addProperty(Void.exampleResource,
            voidModel.createResource("http://identifiers.org/obo.chebi/17242"));
    for (String organism : organismTaxonomy.values()) {
        voidBase.addProperty(DCTerms.subject,
                voidModel.createResource("http://dbpedia.org/page/" + organism.replace(" ", "_")));
    }
    voidBase.addProperty(Void.vocabulary, Biopax_level3.NAMESPACE);
    voidBase.addProperty(Void.vocabulary, voidModel.createResource(Wp.getURI()));
    voidBase.addProperty(Void.vocabulary, voidModel.createResource(Gpml.getURI()));
    voidBase.addProperty(Void.vocabulary, FOAF.NAMESPACE);
    voidBase.addProperty(Void.vocabulary, Pav.NAMESPACE);

    // Custom Properties
    String baseUri = "http://rdf.wikipathways.org/";
    NodeList pathwayElements = wikiPathwaysDom.getElementsByTagName("Pathway");
    // BioDataSource.init();
    for (int i = 0; i < pathwayElements.getLength(); i++) {
        Model pathwayModel = createPathwayModel();
        String wpId = pathwayElements.item(i).getAttributes().getNamedItem("identifier").getTextContent();
        String revision = pathwayElements.item(i).getAttributes().getNamedItem("revision").getTextContent();
        String pathwayOrganism = "";
        if (pathwayElements.item(i).getAttributes().getNamedItem("Organism") != null)
            pathwayOrganism = pathwayElements.item(i).getAttributes().getNamedItem("Organism")
                    .getTextContent().trim();
        if (Integer.valueOf(revision) > latestRevision) {
            latestRevision = Integer.valueOf(revision);
        }
        File f = new File("/tmp/" + args[0] + "/" + wpId + "_r" + revision + ".ttl");
        System.out.println(f.getName());
        if (!f.exists()) {
            Resource voidPwResource = wpRelatedCalls.addVoidTriples(voidModel, voidBase,
                    pathwayElements.item(i), client);
            Resource pwResource = wpRelatedCalls.addPathwayLevelTriple(pathwayModel,
                    pathwayElements.item(i), organismTaxonomy);
            // Get the comments
            NodeList commentElements = ((Element) pathwayElements.item(i)).getElementsByTagName("Comment");
            wpRelatedCalls.addCommentTriples(pathwayModel, pwResource, commentElements, wpId, revision);
            // Get the Groups
            NodeList groupElements = ((Element) pathwayElements.item(i)).getElementsByTagName("Group");
            for (int n = 0; n < groupElements.getLength(); n++) {
                wpRelatedCalls.addGroupTriples(pathwayModel, pwResource, groupElements.item(n), wpId,
                        revision);
            }
            // Get all the Datanodes
            NodeList dataNodesElement = ((Element) pathwayElements.item(i)).getElementsByTagName("DataNode");
            for (int j = 0; j < dataNodesElement.getLength(); j++) {
                wpRelatedCalls.addDataNodeTriples(pathwayModel, pwResource, dataNodesElement.item(j),
                        wpId, revision, bridgeDbmodel, mapper);
            }
            // Get all the lines
            NodeList linesElement = ((Element) pathwayElements.item(i)).getElementsByTagName("Line");
            for (int k = 0; k < linesElement.getLength(); k++) {
                wpRelatedCalls.addLineTriples(pathwayModel, pwResource, linesElement.item(k), wpId, revision);
            }
            // Get all the labels
            NodeList labelsElement = ((Element) pathwayElements.item(i)).getElementsByTagName("Label");
            for (int l = 0; l < labelsElement.getLength(); l++) {
                wpRelatedCalls.addLabelTriples(pathwayModel, pwResource, labelsElement.item(l), wpId,
                        revision);
            }
            NodeList referenceElements = ((Element) pathwayElements.item(i))
                    .getElementsByTagName("bp:PublicationXref");
            for (int m = 0; m < referenceElements.getLength(); m++) {
                wpRelatedCalls.addReferenceTriples(pathwayModel, pwResource, referenceElements.item(m),
                        wpId, revision);
            }
            NodeList referenceElements2 = ((Element) pathwayElements.item(i))
                    .getElementsByTagName("bp:publicationXref");
            for (int m = 0; m < referenceElements2.getLength(); m++) {
                wpRelatedCalls.addReferenceTriples(pathwayModel, pwResource, referenceElements2.item(m),
                        wpId, revision);
            }
            NodeList referenceElements3 = ((Element) pathwayElements.item(i))
                    .getElementsByTagName("bp:PublicationXRef");
            for (int m = 0; m < referenceElements3.getLength(); m++) {
                wpRelatedCalls.addReferenceTriples(pathwayModel, pwResource, referenceElements3.item(m),
                        wpId, revision);
            }
            NodeList ontologyElements = ((Element) pathwayElements.item(i))
                    .getElementsByTagName("bp:openControlledVocabulary");
            for (int n = 0; n < ontologyElements.getLength(); n++) {
                wpRelatedCalls.addPathwayOntologyTriples(pathwayModel, pwResource, ontologyElements.item(n));
            }
            System.out.println(wpId);
            basicCalls.saveRDF2File(pathwayModel, "/tmp/" + args[0] + "/" + wpId + "_r" + revision + ".ttl",
                    "TURTLE");
            model.add(pathwayModel);
            pathwayModel.removeAll();
        }
    }
    Date myDate = new Date();
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
    String myDateString = sdf.format(myDate);
    FileUtils.writeStringToFile(new File("latestVersion.txt"),
            "v" + schemaVersion + "." + softwareVersion + "." + latestRevision + "_" + myDateString);
    basicCalls.saveRDF2File(model, "/tmp/wpContent_v" + schemaVersion + "." + softwareVersion + "."
            + latestRevision + "_" + myDateString + ".ttl", "TURTLE");
    basicCalls.saveRDF2File(voidModel, "/tmp/void.ttl", "TURTLE");
}
From source file:com.hp.test.framework.Utlis.java
public static void main(String ar[]) throws IOException {
    ArrayList<String> runs_list = new ArrayList<String>();
    String basepath = replacelogs();
    String path = basepath + "ATU Reports\\Results";
    File directory = new File(path);
    File[] subdirs = directory.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY);
    for (File dir : subdirs) {
        log.info("Directory: " + dir.getName());
        runs_list.add(dir.getName());
    }
    Map<String, List<String>> runCounts = GetCountsrunsWise(runs_list, path);
    int success = replaceCounts(runCounts, path);
    if (success == 0) {
        File SourceFile = new File(path + "\\lineChart_temp.js");
        File RenameFile = new File(path + "\\lineChart.js");
        renameOriginalFile(SourceFile, RenameFile);
        File SourceFile1 = new File(path + "\\barChart_temp.js");
        File RenameFile1 = new File(path + "\\barChart.js");
        renameOriginalFile(SourceFile1, RenameFile1);
        String Reports_path = basepath + "ATU Reports\\";
        int LatestRun = Utlis.getLastRun(Reports_path);
        Utlis.getpaths(Reports_path + "\\Results\\Run_" + String.valueOf(LatestRun));
        try {
            Utlis.replaceMainTable(false, new File(Reports_path + "index.html"));
            Utlis.replaceMainTable(true, new File(Reports_path + "results\\" + "ConsolidatedPage.html"));
            Utlis.replaceMainTable(true,
                    new File(Reports_path + "Results\\Run_" + String.valueOf(LatestRun) + "CurrentRun.html"));
            fileList.add(
                    new File(Reports_path + "\\Results\\Run_" + String.valueOf(LatestRun) + "CurrentRun.html"));
            for (File f : fileList) {
                Utlis.replaceMainTable(true, f);
            }
        } catch (Exception ex) {
            log.error("Error in updating Report format" + ex.getMessage());
        }
    }
}