List of usage examples for java.io.File.isFile()
public boolean isFile()
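Before the project-specific examples below, here is a minimal, self-contained sketch of the typical pattern (the file name and class name are hypothetical, used only for illustration): isFile() returns true only when the path exists and denotes a normal file, which is why the examples that follow use it to validate paths before reading them.

import java.io.File;

public class IsFileDemo {
    public static void main(String[] args) {
        // Hypothetical path, used only for illustration.
        File candidate = new File(args.length > 0 ? args[0] : "config.properties");

        if (candidate.isFile()) {
            // The path exists and refers to a normal file (not a directory).
            System.out.println("Readable file: " + candidate.getAbsolutePath());
        } else {
            // The path does not exist, or it is a directory or other non-regular file.
            System.out.println("Not a regular file: " + candidate.getAbsolutePath());
        }
    }
}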
From source file:net.mybox.mybox.Server.java
/**
 * Handle the command line args and instantiate the Server
 * @param args
 */
public static void main(String args[]) {
    Options options = new Options();
    options.addOption("c", "config", true, "configuration file");
    // options.addOption("d", "database", true, "accounts database file"); // TODO: handle in config?
    options.addOption("a", "apphome", true, "application home directory");
    options.addOption("h", "help", false, "show help screen");
    options.addOption("V", "version", false, "print the Mybox version");

    CommandLineParser line = new GnuParser();
    CommandLine cmd = null;

    try {
        cmd = line.parse(options, args);
    } catch (ParseException exp) {
        System.err.println(exp.getMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(Server.class.getName(), options);
        return;
    }

    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(Server.class.getName(), options);
        return;
    }

    if (cmd.hasOption("V")) {
        Client.printMessage("version " + Common.appVersion);
        return;
    }

    if (cmd.hasOption("a")) {
        String appHomeDir = cmd.getOptionValue("a");
        try {
            Common.updatePaths(appHomeDir);
        } catch (FileNotFoundException e) {
            printErrorExit(e.getMessage());
        }
        updatePaths();
    }

    String configFile = defaultConfigFile;
    // String accountsDBfile = defaultAccountsDbFile;

    if (cmd.hasOption("c")) {
        configFile = cmd.getOptionValue("c");
    }

    File fileCheck = new File(configFile);
    if (!fileCheck.isFile())
        Server.printErrorExit("Config not found: " + configFile + "\nPlease run ServerSetup");

    // if (cmd.hasOption("d")){
    //     accountsDBfile = cmd.getOptionValue("d");
    // }
    //
    // fileCheck = new File(accountsDBfile);
    // if (!fileCheck.isFile())
    //     Server.printErrorExit("Error: account database not found " + accountsDBfile);

    Server server = new Server(configFile);
}
From source file:net.sf.mcf2pdf.Main.java
@SuppressWarnings("static-access") public static void main(String[] args) { Options options = new Options(); Option o = OptionBuilder.hasArg().isRequired() .withDescription("Installation location of My CEWE Photobook. REQUIRED.").create('i'); options.addOption(o);/*from w w w. jav a 2s .c om*/ options.addOption("h", false, "Prints this help and exits."); options.addOption("t", true, "Location of MCF temporary files."); options.addOption("w", true, "Location for temporary images generated during conversion."); options.addOption("r", true, "Sets the resolution to use for page rendering, in DPI. Default is 150."); options.addOption("n", true, "Sets the page number to render up to. Default renders all pages."); options.addOption("b", false, "Prevents rendering of binding between double pages."); options.addOption("x", false, "Generates only XSL-FO content instead of PDF content."); options.addOption("q", false, "Quiet mode - only errors are logged."); options.addOption("d", false, "Enables debugging logging output."); CommandLine cl; try { CommandLineParser parser = new PosixParser(); cl = parser.parse(options, args); } catch (ParseException pe) { printUsage(options, pe); System.exit(3); return; } if (cl.hasOption("h")) { printUsage(options, null); return; } if (cl.getArgs().length != 2) { printUsage(options, new ParseException("INFILE and OUTFILE must be specified. Arguments were: " + cl.getArgList())); System.exit(3); return; } File installDir = new File(cl.getOptionValue("i")); if (!installDir.isDirectory()) { printUsage(options, new ParseException("Specified installation directory does not exist.")); System.exit(3); return; } File tempDir = null; String sTempDir = cl.getOptionValue("t"); if (sTempDir == null) { tempDir = new File(new File(System.getProperty("user.home")), ".mcf"); if (!tempDir.isDirectory()) { printUsage(options, new ParseException("MCF temporary location not specified and default location " + tempDir + " does not exist.")); System.exit(3); return; } } else { tempDir = new File(sTempDir); if (!tempDir.isDirectory()) { printUsage(options, new ParseException("Specified temporary location does not exist.")); System.exit(3); return; } } File mcfFile = new File(cl.getArgs()[0]); if (!mcfFile.isFile()) { printUsage(options, new ParseException("MCF input file does not exist.")); System.exit(3); return; } mcfFile = mcfFile.getAbsoluteFile(); File tempImages = new File(new File(System.getProperty("user.home")), ".mcf2pdf"); if (cl.hasOption("w")) { tempImages = new File(cl.getOptionValue("w")); if (!tempImages.mkdirs() && !tempImages.isDirectory()) { printUsage(options, new ParseException("Specified working dir does not exist and could not be created.")); System.exit(3); return; } } int dpi = 150; if (cl.hasOption("r")) { try { dpi = Integer.valueOf(cl.getOptionValue("r")).intValue(); if (dpi < 30 || dpi > 600) throw new IllegalArgumentException(); } catch (Exception e) { printUsage(options, new ParseException("Parameter for option -r must be an integer between 30 and 600.")); } } int maxPageNo = -1; if (cl.hasOption("n")) { try { maxPageNo = Integer.valueOf(cl.getOptionValue("n")).intValue(); if (maxPageNo < 0) throw new IllegalArgumentException(); } catch (Exception e) { printUsage(options, new ParseException("Parameter for option -n must be an integer >= 0.")); } } boolean binding = true; if (cl.hasOption("b")) { binding = false; } OutputStream finalOut; if (cl.getArgs()[1].equals("-")) finalOut = System.out; else { try { finalOut = new FileOutputStream(cl.getArgs()[1]); } catch 
(IOException e) { printUsage(options, new ParseException("Output file could not be created.")); System.exit(3); return; } } // configure logging, if no system property is present if (System.getProperty("log4j.configuration") == null) { PropertyConfigurator.configure(Main.class.getClassLoader().getResource("log4j.properties")); Logger.getRootLogger().setLevel(Level.INFO); if (cl.hasOption("q")) Logger.getRootLogger().setLevel(Level.ERROR); if (cl.hasOption("d")) Logger.getRootLogger().setLevel(Level.DEBUG); } // start conversion to XSL-FO // if -x is specified, this is the only thing we do OutputStream xslFoOut; if (cl.hasOption("x")) xslFoOut = finalOut; else xslFoOut = new ByteArrayOutputStream(); Log log = LogFactory.getLog(Main.class); try { new Mcf2FoConverter(installDir, tempDir, tempImages).convert(mcfFile, xslFoOut, dpi, binding, maxPageNo); xslFoOut.flush(); if (!cl.hasOption("x")) { // convert to PDF log.debug("Converting XSL-FO data to PDF"); byte[] data = ((ByteArrayOutputStream) xslFoOut).toByteArray(); PdfUtil.convertFO2PDF(new ByteArrayInputStream(data), finalOut, dpi); finalOut.flush(); } } catch (Exception e) { log.error("An exception has occured", e); System.exit(1); return; } finally { if (finalOut instanceof FileOutputStream) { try { finalOut.close(); } catch (Exception e) { } } } }
From source file:DIA_Umpire_Quant.DIA_Umpire_LCMSIDGen.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
    System.out.println("=================================================================================================");
    System.out.println("DIA-Umpire LCMSID geneartor (version: " + UmpireInfo.GetInstance().Version + ")");
    if (args.length != 1) {
        System.out.println("command format error, the correct format should be: java -jar -Xmx10G DIA_Umpire_LCMSIDGen.jar diaumpire_module.params");
        return;
    }
    try {
        ConsoleLogger.SetConsoleLogger(Level.INFO);
        ConsoleLogger.SetFileLogger(Level.DEBUG, FilenameUtils.getFullPath(args[0]) + "diaumpire_lcmsidgen.log");
    } catch (Exception e) {
    }

    Logger.getRootLogger().info("Version: " + UmpireInfo.GetInstance().Version);
    Logger.getRootLogger().info("Parameter file:" + args[0]);

    BufferedReader reader = new BufferedReader(new FileReader(args[0]));
    String line = "";
    String WorkFolder = "";
    int NoCPUs = 2;

    TandemParam tandemPara = new TandemParam(DBSearchParam.SearchInstrumentType.TOF5600);
    HashMap<String, File> AssignFiles = new HashMap<>();

    //<editor-fold defaultstate="collapsed" desc="Reading parameter file">
    while ((line = reader.readLine()) != null) {
        line = line.trim();
        Logger.getRootLogger().info(line);
        if (!"".equals(line) && !line.startsWith("#")) {
            //System.out.println(line);
            if (line.equals("==File list begin")) {
                do {
                    line = reader.readLine();
                    line = line.trim();
                    if (line.equals("==File list end")) {
                        continue;
                    } else if (!"".equals(line)) {
                        File newfile = new File(line);
                        if (newfile.exists()) {
                            AssignFiles.put(newfile.getAbsolutePath(), newfile);
                        } else {
                            Logger.getRootLogger().info("File: " + newfile + " does not exist.");
                        }
                    }
                } while (!line.equals("==File list end"));
            }
            if (line.split("=").length < 2) {
                continue;
            }
            String type = line.split("=")[0].trim();
            String value = line.split("=")[1].trim();
            switch (type) {
                case "Path": {
                    WorkFolder = value;
                    break;
                }
                case "path": {
                    WorkFolder = value;
                    break;
                }
                case "Thread": {
                    NoCPUs = Integer.parseInt(value);
                    break;
                }
                case "DecoyPrefix": {
                    if (!"".equals(value)) {
                        tandemPara.DecoyPrefix = value;
                    }
                    break;
                }
                case "PeptideFDR": {
                    tandemPara.PepFDR = Float.parseFloat(value);
                    break;
                }
            }
        }
    }
    //</editor-fold>

    //Initialize PTM manager using compomics library
    PTMManager.GetInstance();

    //Generate DIA file list
    ArrayList<DIAPack> FileList = new ArrayList<>();

    File folder = new File(WorkFolder);
    if (!folder.exists()) {
        Logger.getRootLogger().info("The path : " + WorkFolder + " cannot be found.");
        System.exit(1);
    }
    for (final File fileEntry : folder.listFiles()) {
        if (fileEntry.isFile()
                && (fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzxml")
                        | fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzml"))
                && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q1.mzxml")
                && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q2.mzxml")
                && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) {
            AssignFiles.put(fileEntry.getAbsolutePath(), fileEntry);
        }
        if (fileEntry.isDirectory()) {
            for (final File fileEntry2 : fileEntry.listFiles()) {
                if (fileEntry2.isFile()
                        && (fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzxml")
                                | fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzml"))
                        && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q1.mzxml")
                        && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q2.mzxml")
                        && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) {
                    AssignFiles.put(fileEntry2.getAbsolutePath(), fileEntry2);
                }
            }
        }
    }

    Logger.getRootLogger().info("No. of files assigned :" + AssignFiles.size());
    for (File fileEntry : AssignFiles.values()) {
        Logger.getRootLogger().info(fileEntry.getAbsolutePath());
    }

    //process each DIA file to genearate untargeted identifications
    for (File fileEntry : AssignFiles.values()) {
        String mzXMLFile = fileEntry.getAbsolutePath();
        if (mzXMLFile.toLowerCase().endsWith(".mzxml") | mzXMLFile.toLowerCase().endsWith(".mzml")) {
            long time = System.currentTimeMillis();
            DIAPack DiaFile = new DIAPack(mzXMLFile, NoCPUs);
            FileList.add(DiaFile);
            Logger.getRootLogger().info("=================================================================================================");
            Logger.getRootLogger().info("Processing " + mzXMLFile);

            if (!DiaFile.LoadDIASetting()) {
                Logger.getRootLogger().info("Loading DIA setting failed, job is incomplete");
                System.exit(1);
            }
            if (!DiaFile.LoadParams()) {
                Logger.getRootLogger().info("Loading parameters failed, job is incomplete");
                System.exit(1);
            }
            Logger.getRootLogger().info("Loading identification results " + mzXMLFile + "....");

            DiaFile.ParsePepXML(tandemPara, null);
            DiaFile.BuildStructure();
            if (!DiaFile.MS1FeatureMap.ReadPeakCluster()) {
                Logger.getRootLogger().info("Loading peak and structure failed, job is incomplete");
                System.exit(1);
            }
            DiaFile.MS1FeatureMap.ClearMonoisotopicPeakOfCluster();
            //Generate mapping between index of precursor feature and pseudo MS/MS scan index
            DiaFile.GenerateClusterScanNomapping();
            //Doing quantification
            DiaFile.AssignQuant();
            DiaFile.ClearStructure();

            DiaFile.IDsummary.ReduceMemoryUsage();
            time = System.currentTimeMillis() - time;
            Logger.getRootLogger().info(mzXMLFile + " processed time:"
                    + String.format("%d hour, %d min, %d sec",
                            TimeUnit.MILLISECONDS.toHours(time),
                            TimeUnit.MILLISECONDS.toMinutes(time) - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(time)),
                            TimeUnit.MILLISECONDS.toSeconds(time) - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(time))));
        }
        Logger.getRootLogger().info("Job done");
        Logger.getRootLogger().info("=================================================================================================");
    }
}
From source file:gemlite.core.commands.WServer.java
public static void main(String[] args2) throws Exception {
    String[] defaultArgs = new String[] { "D:/work/data/Gemlite-demo/target/Gemlite-demo-0.0.1-SNAPSHOT.war", "8082", "/" };
    defaultArgs = args2 == null || args2.length == 0 ? defaultArgs : args2;

    ServerConfigHelper.initConfig();
    ServerConfigHelper.initLog4j("classpath:log4j2-server.xml");
    ServerConfigHelper.setProperty("bind-address", ServerConfigHelper.getConfig(ITEMS.BINDIP));

    int port = 8080;
    String contextPath = "/";
    String warPath = "";
    if (defaultArgs.length < 1) {
        LogUtil.getCoreLog().error("Start error,plelase Start Ws server like this : java gemlite.core.command.WServer /home/ws.war");
        return;
    }

    warPath = defaultArgs[0];
    File file = new File(warPath);
    if (!file.exists()) {
        LogUtil.getCoreLog().error("Error input:" + defaultArgs[0] + " war path is not existing!");
        return;
    }
    if (!file.isFile()) {
        LogUtil.getCoreLog().error("Error input:" + defaultArgs[0] + " war path is not a valid file!");
        return;
    }

    if (defaultArgs.length > 1) {
        port = NumberUtils.toInt(defaultArgs[1]);
        if (port <= 0 || port >= 65535) {
            LogUtil.getCoreLog().error("Port Error:" + defaultArgs[1] + ",not a valid port , make sure port>0 and port<65535");
            return;
        }
    }
    if (defaultArgs.length > 2) {
        contextPath += StringUtils.replace(defaultArgs[2], "/", "");
    }

    try {
        // jetty_home
        String jetty_home = ServerConfigHelper.getConfig(ITEMS.GS_WORK) + File.separator + "jetty_home";
        jetty_home += File.separator + StringUtils.replace(contextPath, "/", "") + port;
        File jfile = new File(jetty_home);
        jfile.mkdirs();
        System.setProperty("jetty.home", jetty_home);

        String jetty_logs = jetty_home + File.separator + "logs" + File.separator;
        File logsFile = new File(jetty_logs);
        logsFile.mkdirs();
        System.setProperty("jetty.logs", jetty_logs);

        Server server = new Server();
        HttpConfiguration config = new HttpConfiguration();
        ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(config));
        connector.setReuseAddress(true);
        connector.setIdleTimeout(30000);
        connector.setPort(port);
        server.addConnector(connector);

        WebAppContext webapp = new WebAppContext();
        webapp.setContextPath(contextPath);
        webapp.setWar(warPath);

        String tmpStr = jetty_home + File.separator + "webapps" + File.separator;
        File tmpDir = new File(tmpStr);
        tmpDir.mkdirs();
        webapp.setTempDirectory(tmpDir);

        // webapp.setExtraClasspath(extrapath);
        webapp.setParentLoaderPriority(true);

        RequestLogHandler requestLogHandler = new RequestLogHandler();
        NCSARequestLog requestLog = new NCSARequestLog(jetty_logs + File.separator + "jetty-yyyy_mm_dd.request.log");
        requestLog.setRetainDays(30);
        requestLog.setAppend(true);
        requestLog.setExtended(false);
        requestLog.setLogTimeZone(TimeZone.getDefault().getID());
        requestLogHandler.setRequestLog(requestLog);
        webapp.setHandler(requestLogHandler);

        ContextHandler ch = webapp.getServletContext().getContextHandler();
        ch.setLogger(new Slf4jLog("gemlite.coreLog"));

        server.setHandler(webapp);
        server.start();

        System.out.println("-----------------------------------------------------");
        LogUtil.getCoreLog().info("Ws Server started,You can visite -> http://"
                + ServerConfigHelper.getConfig(ITEMS.BINDIP) + ":" + port + contextPath);
        server.join();
    } catch (Exception e) {
        LogUtil.getCoreLog().error("Ws Server error:", e);
    }
}
From source file:net.sf.mpaxs.test.ImpaxsExecution.java
/**
 *
 * @param args
 */
public static void main(String[] args) {
    Options options = new Options();
    Option[] optionArray = new Option[] {
            OptionBuilder.withArgName("nhosts").hasArg()
                    .withDescription("Number of hosts for parallel processing").create("n"),
            OptionBuilder.withArgName("mjobs").hasArg().withDescription("Number of jobs to run in parallel")
                    .create("m"),
            OptionBuilder.withArgName("runmode").hasArg()
                    .withDescription("The mode in which to operate: one of <ALL,LOCAL,DISTRIBUTED>")
                    .create("r"),
            // OptionBuilder.withArgName("gui").
            //         withDescription("Create gui for distributed execution").create("g")
    };
    for (Option opt : optionArray) {
        options.addOption(opt);
    }
    if (args.length == 0) {
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp(StartUp.class.getCanonicalName(), options, true);
        System.exit(1);
    }
    GnuParser gp = new GnuParser();
    int nhosts = 1;
    int mjobs = 10;
    boolean gui = false;
    Mode mode = Mode.ALL;
    try {
        CommandLine cl = gp.parse(options, args);
        if (cl.hasOption("n")) {
            nhosts = Integer.parseInt(cl.getOptionValue("n"));
        }
        if (cl.hasOption("m")) {
            mjobs = Integer.parseInt(cl.getOptionValue("m"));
        }
        if (cl.hasOption("r")) {
            mode = Mode.valueOf(cl.getOptionValue("r"));
        }
        // if (cl.hasOption("g")) {
        //     gui = true;
        // }
    } catch (Exception ex) {
        Logger.getLogger(StartUp.class.getName()).log(Level.SEVERE, null, ex);
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp(StartUp.class.getCanonicalName(), options, true);
        System.exit(1);
    }
    String version;
    try {
        version = net.sf.mpaxs.api.Version.getVersion();
        System.out.println("Running mpaxs " + version);
        File computeHostJarLocation = new File(System.getProperty("user.dir"), "mpaxs.jar");
        if (!computeHostJarLocation.exists() || !computeHostJarLocation.isFile()) {
            throw new IOException("Could not locate mpaxs.jar in " + System.getProperty("user.dir"));
        }
        final PropertiesConfiguration cfg = new PropertiesConfiguration();
        //set default execution type
        cfg.setProperty(ConfigurationKeys.KEY_EXECUTION_MODE, ExecutionType.DRMAA);
        //set location of compute host jar
        cfg.setProperty(ConfigurationKeys.KEY_PATH_TO_COMPUTEHOST_JAR, computeHostJarLocation);
        //do not exit to console when master server shuts down
        cfg.setProperty(ConfigurationKeys.KEY_MASTER_SERVER_EXIT_ON_SHUTDOWN, false);
        //limit the number of used compute hosts
        cfg.setProperty(ConfigurationKeys.KEY_MAX_NUMBER_OF_CHOSTS, nhosts);
        cfg.setProperty(ConfigurationKeys.KEY_NATIVE_SPEC, "");
        cfg.setProperty(ConfigurationKeys.KEY_GUI_MODE, gui);
        cfg.setProperty(ConfigurationKeys.KEY_SILENT_MODE, true);
        cfg.setProperty(ConfigurationKeys.KEY_SCHEDULE_WAIT_TIME, "500");
        final int maxJobs = mjobs;
        final int maxThreads = nhosts;
        final Mode runMode = mode;
        printMessage("Run mode: " + runMode);
        Executors.newSingleThreadExecutor().submit(new Runnable() {
            @Override
            public void run() {
                if (runMode == Mode.ALL || runMode == Mode.LOCAL) {
                    printMessage("Running Within VM Execution");
                    /*
                     * LOCAL within VM execution
                     */
                    WithinVmExecution lhe = new WithinVmExecution(maxJobs, maxThreads);
                    try {
                        Logger.getLogger(ImpaxsExecution.class.getName()).log(Level.INFO, "Sum is: " + lhe.call());
                    } catch (Exception ex) {
                        Logger.getLogger(ImpaxsExecution.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
                if (runMode == Mode.ALL || runMode == Mode.DISTRIBUTED) {
                    printMessage("Running Distributed Host RMI Execution");
                    /*
                     * Grid Engine (DRMAA API) or local host distributed RMI execution
                     */
                    DistributedRmiExecution de = new DistributedRmiExecution(cfg, maxJobs);
                    try {
                        Logger.getLogger(ImpaxsExecution.class.getName()).log(Level.INFO, "Sum is: " + de.call());
                    } catch (Exception ex) {
                        Logger.getLogger(ImpaxsExecution.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
                System.exit(0);
            }
        });
    } catch (IOException ex) {
        Logger.getLogger(ImpaxsExecution.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:edu.ucla.cs.scai.swim.qa.ontology.dbpedia.tipicality.Test.java
public static void main(String[] args) throws IOException, ClassNotFoundException {
    String path = DBpediaOntology.DBPEDIA_CSV_FOLDER;
    if (args != null && args.length > 0) {
        path = args[0];
        if (!path.endsWith("/")) {
            path = path + "/";
        }
    }

    stopAttributes.add("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
    stopAttributes.add("http://www.w3.org/2002/07/owl#sameAs");
    stopAttributes.add("http://dbpedia.org/ontology/wikiPageRevisionID");
    stopAttributes.add("http://dbpedia.org/ontology/wikiPageID");
    stopAttributes.add("http://purl.org/dc/elements/1.1/description");
    stopAttributes.add("http://dbpedia.org/ontology/thumbnail");
    stopAttributes.add("http://dbpedia.org/ontology/type");

    try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(path + "counts.bin"))) {
        categories = (HashSet<String>) ois.readObject();
        attributes = (HashSet<String>) ois.readObject();
        categoryCount = (HashMap<String, Integer>) ois.readObject();
        attributeCount = (HashMap<String, Integer>) ois.readObject();
        categoryAttributeCount = (HashMap<String, HashMap<String, Integer>>) ois.readObject();
        attributeCategoryCount = (HashMap<String, HashMap<String, Integer>>) ois.readObject();
    }

    System.out.println(categories.size() + " categories found");
    System.out.println(attributes.size() + " attributes found");

    n = 0;
    for (Map.Entry<String, Integer> e : categoryCount.entrySet()) {
        n += e.getValue();
    }
    System.out.println(n);

    HashMap<String, ArrayList<Pair>> sortedCategoryAttributes = new HashMap<>();

    for (String category : categories) {
        //System.out.println(category);
        //System.out.println("-----------");
        ArrayList<Pair> attributesRank = new ArrayList<Pair>();
        Integer c = categoryCount.get(category);
        if (c == null || c == 0) {
            continue;
        }
        HashMap<String, Integer> thisCategoryAttributeCount = categoryAttributeCount.get(category);
        for (Map.Entry<String, Integer> e : thisCategoryAttributeCount.entrySet()) {
            attributesRank.add(new Pair(e.getKey(), 1.0 * e.getValue() / c));
        }
        Collections.sort(attributesRank);
        for (Pair p : attributesRank) {
            //System.out.println("A:" + p.getS() + "\t" + p.getP());
        }
        //System.out.println("===============================");
        sortedCategoryAttributes.put(category, attributesRank);
    }

    for (String attribute : attributes) {
        //System.out.println(attribute);
        //System.out.println("-----------");
        ArrayList<Pair> categoriesRank = new ArrayList<>();
        Integer a = attributeCount.get(attribute);
        if (a == null || a == 0) {
            continue;
        }
        HashMap<String, Integer> thisAttributeCategoryCount = attributeCategoryCount.get(attribute);
        for (Map.Entry<String, Integer> e : thisAttributeCategoryCount.entrySet()) {
            categoriesRank.add(new Pair(e.getKey(), 1.0 * e.getValue() / a));
        }
        Collections.sort(categoriesRank);
        for (Pair p : categoriesRank) {
            //System.out.println("C:" + p.getS() + "\t" + p.getP());
        }
        //System.out.println("===============================");
    }

    HashMap<Integer, Integer> histogram = new HashMap<>();
    histogram.put(0, 0);
    histogram.put(1, 0);
    histogram.put(2, 0);
    histogram.put(Integer.MAX_VALUE, 0);

    int nTest = 0;

    if (args != null && args.length > 0) {
        path = args[0];
        if (!path.endsWith("/")) {
            path = path + "/";
        }
    }

    for (File f : new File(path).listFiles()) {
        if (f.isFile() && f.getName().endsWith(".csv")) {
            String category = f.getName().replaceFirst("\\.csv", "");
            System.out.println("Category: " + category);
            ArrayList<HashSet<String>> entities = extractEntities(f, 2);
            for (HashSet<String> attributesOfThisEntity : entities) {
                nTest++;
                ArrayList<String> rankedCategories = rankedCategories(attributesOfThisEntity);
                boolean found = false;
                for (int i = 0; i < rankedCategories.size() && !found; i++) {
                    if (rankedCategories.get(i).equals(category)) {
                        Integer count = histogram.get(i);
                        if (count == null) {
                            histogram.put(i, 1);
                        } else {
                            histogram.put(i, count + 1);
                        }
                        found = true;
                    }
                }
                if (!found) {
                    histogram.put(Integer.MAX_VALUE, histogram.get(Integer.MAX_VALUE) + 1);
                }
            }
            System.out.println("Tested entities: " + nTest);
            System.out.println("1: " + histogram.get(0));
            System.out.println("2: " + histogram.get(1));
            System.out.println("3: " + histogram.get(2));
            System.out.println("+3: " + (nTest - histogram.get(2) - histogram.get(1) - histogram.get(0) - histogram.get(Integer.MAX_VALUE)));
            System.out.println("NF: " + histogram.get(Integer.MAX_VALUE));
        }
    }
}
From source file:eu.fbk.utils.lsa.util.AnvurDev.java
public static void main(String[] args) throws Exception {
    String logConfig = System.getProperty("log-config");
    if (logConfig == null) {
        logConfig = "log-config.txt";
    }

    PropertyConfigurator.configure(logConfig);

    if (args.length != 8) {
        System.out.println(args.length);
        System.out.println("Usage: java -mx2G eu.fbk.utils.lsa.util.AnvurDev root-lsa-en root-lsa-it threshold-lsa size-lsa dim-lsa idf-lsa in-file-tsv fields-tsv\n\n");
        System.exit(1);
    }

    // DecimalFormat dec = new DecimalFormat("#.00");

    File enUt = new File(args[0] + "-Ut");
    File enSk = new File(args[0] + "-S");
    File enr = new File(args[0] + "-row");
    File enc = new File(args[0] + "-col");
    File endf = new File(args[0] + "-df");

    File itUt = new File(args[1] + "-Ut");
    File itSk = new File(args[1] + "-S");
    File itr = new File(args[1] + "-row");
    File itc = new File(args[1] + "-col");
    File itdf = new File(args[1] + "-df");

    double threshold = Double.parseDouble(args[2]);
    int size = Integer.parseInt(args[3]);
    int dim = Integer.parseInt(args[4]);
    boolean rescaleIdf = Boolean.parseBoolean(args[5]);

    LSM enLsm = new LSM(enUt, enSk, enr, enc, endf, dim, rescaleIdf);
    LSM itLsm = new LSM(itUt, itSk, itr, itc, itdf, dim, rescaleIdf);

    File in = new File(args[6]);
    if (in.isFile()) {
        run(enLsm, itLsm, args[6], args[7]);
    } else if (in.isDirectory()) {
        FolderScanner fs = new FolderScanner(in);
        fs.setFiler(new TsvFilter());
        int count = 0;
        while (fs.hasNext()) {
            Object[] files = fs.next();
            System.out.println((count++) + " : " + files.length);
            for (int i = 0; i < files.length; i++) {
                String name = ((File) files[i]).getAbsolutePath();
                System.out.println(name);
                run(enLsm, itLsm, name, args[7]);
            } // end for i
        } // end while
    }
}
From source file:eu.fbk.utils.lsa.util.AnvurTest.java
public static void main(String[] args) throws Exception {
    String logConfig = System.getProperty("log-config");
    if (logConfig == null) {
        logConfig = "log-config.txt";
    }

    PropertyConfigurator.configure(logConfig);

    if (args.length != 8) {
        System.out.println(args.length);
        System.out.println("Usage: java -mx2G eu.fbk.utils.lsa.util.AnvurTest root-lsa-en root-lsa-it threshold-lsa size-lsa dim-lsa idf-lsa in-file-tsv fields-tsv\n\n");
        System.exit(1);
    }

    // DecimalFormat dec = new DecimalFormat("#.00");

    File enUt = new File(args[0] + "-Ut");
    File enSk = new File(args[0] + "-S");
    File enr = new File(args[0] + "-row");
    File enc = new File(args[0] + "-col");
    File endf = new File(args[0] + "-df");

    File itUt = new File(args[1] + "-Ut");
    File itSk = new File(args[1] + "-S");
    File itr = new File(args[1] + "-row");
    File itc = new File(args[1] + "-col");
    File itdf = new File(args[1] + "-df");

    double threshold = Double.parseDouble(args[2]);
    int size = Integer.parseInt(args[3]);
    int dim = Integer.parseInt(args[4]);
    boolean rescaleIdf = Boolean.parseBoolean(args[5]);

    LSM enLsm = new LSM(enUt, enSk, enr, enc, endf, dim, rescaleIdf);
    LSM itLsm = new LSM(itUt, itSk, itr, itc, itdf, dim, rescaleIdf);

    File in = new File(args[6]);
    if (in.isFile()) {
        run(enLsm, itLsm, args[6], args[7]);
    } else if (in.isDirectory()) {
        FolderScanner fs = new FolderScanner(in);
        fs.setFiler(new TsvFilter());
        int count = 0;
        while (fs.hasNext()) {
            Object[] files = fs.next();
            System.out.println((count++) + " : " + files.length);
            for (int i = 0; i < files.length; i++) {
                String name = ((File) files[i]).getAbsolutePath();
                System.out.println(name);
                run(enLsm, itLsm, name, args[7]);
            } // end for i
        } // end while
    }
}
From source file:com.act.analysis.surfactant.SurfactantLabeler.java
public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    File inputFile = new File(cl.getOptionValue(OPTION_INPUT_FILE));
    if (!inputFile.isFile()) {
        System.err.format("No input file at: %s\n", inputFile.getAbsolutePath());
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    File outputFile = new File(cl.getOptionValue(OPTION_OUTPUT_FILE));
    if (outputFile.exists()) {
        System.err.format("WARNING: output file at %s already exists\n", outputFile.getAbsolutePath());
    }

    /* Sometimes the InChIs might not appear in the input file (like in regression results). Instead a corpus of
     * names and InChIs can be specified in a separate file and looked up as molecules are read/visualized. The join
     * field is the key on which the InChI for a given row in the input file is found. */
    File inchiSourceFile = null;
    if (cl.hasOption(OPTION_INCHI_SOURCE)) {
        inchiSourceFile = new File(cl.getOptionValue(OPTION_INCHI_SOURCE));
        boolean err = false;
        if (!inchiSourceFile.isFile()) {
            System.err.format("No inchi source file at: %s\n", inchiSourceFile.getAbsolutePath());
            err = true;
        }
        if (!cl.hasOption(OPTION_INCHI_SOURCE_JOIN_FIELD)) {
            System.err.format("Must specify a join field when using an inchi source file.\n");
            err = true;
        }
        if (err) {
            HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
            System.exit(1);
        }
    }

    SurfactantLabeler surfactantLabeler = new SurfactantLabeler();
    surfactantLabeler.runAnalysis(cl.getOptionValue(OPTION_LICENSE_FILE), inputFile, outputFile,
            inchiSourceFile, cl.getOptionValue(OPTION_INCHI_SOURCE_JOIN_FIELD));
}
From source file:cosmos.example.BuildingPermitsExample.java
public static void main(String[] args) throws Exception {
    BuildingPermitsExample example = new BuildingPermitsExample();
    new JCommander(example, args);

    File inputFile = new File(example.fileName);

    Preconditions.checkArgument(inputFile.exists() && inputFile.isFile() && inputFile.canRead(),
            "Expected " + example.fileName + " to be a readable file");

    String zookeepers;
    String instanceName;
    Connector connector;
    MiniAccumuloCluster mac = null;
    File macDir = null;

    // Use the MiniAccumuloCluster is requested
    if (example.useMiniAccumuloCluster) {
        macDir = Files.createTempDir();
        String password = "password";
        MiniAccumuloConfig config = new MiniAccumuloConfig(macDir, password);
        config.setNumTservers(1);

        mac = new MiniAccumuloCluster(config);
        mac.start();

        zookeepers = mac.getZooKeepers();
        instanceName = mac.getInstanceName();

        ZooKeeperInstance instance = new ZooKeeperInstance(instanceName, zookeepers);
        connector = instance.getConnector("root", new PasswordToken(password));
    } else {
        // Otherwise connect to a running instance
        zookeepers = example.zookeepers;
        instanceName = example.instanceName;

        ZooKeeperInstance instance = new ZooKeeperInstance(instanceName, zookeepers);
        connector = instance.getConnector(example.username, new PasswordToken(example.password));
    }

    // Instantiate an instance of Cosmos
    Cosmos cosmos = new CosmosImpl(zookeepers);

    // Create a definition for the data we want to load
    Store id = Store.create(connector, new Authorizations(), AscendingIndexIdentitySet.create());

    // Register the definition with Cosmos so it can track its progress.
    cosmos.register(id);

    // Load all of the data from our inputFile
    LoadBuildingPermits loader = new LoadBuildingPermits(cosmos, id, inputFile);
    loader.run();

    // Finalize the SortableResult which will prevent future writes to the data set
    cosmos.finalize(id);

    // Flush the ingest traces to the backend so we can see the results;
    id.sendTraces();

    // Get back the Set of Columns that we've ingested.
    Set<Column> schema = Sets.newHashSet(cosmos.columns(id));
    log.debug("\nColumns: " + schema);

    Iterator<Column> iter = schema.iterator();
    while (iter.hasNext()) {
        Column c = iter.next();
        // Remove the internal ID field and columns that begin with CONTRACTOR_
        if (c.equals(LoadBuildingPermits.ID) || c.name().startsWith("CONTRACTOR_")) {
            iter.remove();
        }
    }

    Iterable<Index> indices = Iterables.transform(schema, new Function<Column, Index>() {
        @Override
        public Index apply(Column col) {
            return Index.define(col);
        }
    });

    // Ensure that we have locality groups set as we expect
    log.info("Ensure locality groups are set");
    id.optimizeIndices(indices);

    // Compact down the data for this SortableResult
    log.info("Issuing compaction for relevant data");
    id.consolidate();

    final int numTopValues = 10;

    // Walk through each column in the result set
    for (Column c : schema) {
        Stopwatch sw = new Stopwatch();
        sw.start();

        // Get the number of times we've seen each value in a given column
        CloseableIterable<Entry<RecordValue<?>, Long>> groupingsInColumn = cosmos.groupResults(id, c);

        log.info(c.name() + ":");

        // Iterate over the counts, collecting the top N values in each column
        TreeMap<Long, RecordValue<?>> topValues = Maps.newTreeMap();

        for (Entry<RecordValue<?>, Long> entry : groupingsInColumn) {
            if (topValues.size() == numTopValues) {
                Entry<Long, RecordValue<?>> least = topValues.pollFirstEntry();
                if (least.getKey() < entry.getValue()) {
                    topValues.put(entry.getValue(), entry.getKey());
                } else {
                    topValues.put(least.getKey(), least.getValue());
                }
            } else if (topValues.size() < numTopValues) {
                topValues.put(entry.getValue(), entry.getKey());
            }
        }

        for (Long key : topValues.descendingKeySet()) {
            log.info(topValues.get(key).value() + " occurred " + key + " times");
        }

        sw.stop();
        log.info("Took " + sw.toString() + " to run query.\n");
    }

    log.info("Deleting records");

    // Delete the records we've ingested
    if (!example.useMiniAccumuloCluster) {
        // Because I'm lazy and don't want to wait around to run the BatchDeleter when we're just going
        // to rm -rf the directory in a few secs.
        cosmos.delete(id);
    }

    // And shut down Cosmos
    cosmos.close();

    log.info("Cosmos stopped");

    // If we were using MAC, also stop that
    if (example.useMiniAccumuloCluster && null != mac) {
        mac.stop();
        if (null != macDir) {
            FileUtils.deleteDirectory(macDir);
        }
    }
}