List of usage examples for java.io.File.getAbsolutePath()
public String getAbsolutePath()
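Before the full project examples below, here is a minimal sketch of the basic behavior (the class and file names are hypothetical, not taken from the examples): getAbsolutePath() resolves a relative pathname against the current working directory (the "user.dir" system property) and returns the result as a String; the file does not need to exist.

import java.io.File;

public class GetAbsolutePathDemo {
    public static void main(String[] args) {
        // A relative pathname; the file does not need to exist.
        File relative = new File("reports/summary.txt");

        // Resolved against the current working directory,
        // e.g. /home/alice/project/reports/summary.txt (platform dependent).
        System.out.println(relative.getAbsolutePath());

        // An already-absolute pathname comes back essentially unchanged.
        File absolute = new File("/tmp/data.csv");
        System.out.println(absolute.getAbsolutePath());
    }
}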
From source file:boa.compiler.BoaCompiler.java
public static void main(final String[] args) throws IOException {
    CommandLine cl = processCommandLineOptions(args);
    if (cl == null)
        return;
    final ArrayList<File> inputFiles = BoaCompiler.inputFiles;

    // get the name of the generated class
    final String className = getGeneratedClass(cl);

    // get the filename of the jar we will be writing
    final String jarName;
    if (cl.hasOption('o'))
        jarName = cl.getOptionValue('o');
    else
        jarName = className + ".jar";

    // make the output directory
    File outputRoot = null;
    if (cl.hasOption("cd")) {
        outputRoot = new File(cl.getOptionValue("cd"));
    } else {
        outputRoot = new File(new File(System.getProperty("java.io.tmpdir")), UUID.randomUUID().toString());
    }
    final File outputSrcDir = new File(outputRoot, "boa");
    if (!outputSrcDir.mkdirs())
        throw new IOException("unable to mkdir " + outputSrcDir);

    // find custom libs to load
    final List<URL> libs = new ArrayList<URL>();
    if (cl.hasOption('l'))
        for (final String lib : cl.getOptionValues('l'))
            libs.add(new File(lib).toURI().toURL());

    final File outputFile = new File(outputSrcDir, className + ".java");
    final BufferedOutputStream o = new BufferedOutputStream(new FileOutputStream(outputFile));
    try {
        final List<String> jobnames = new ArrayList<String>();
        final List<String> jobs = new ArrayList<String>();
        boolean isSimple = true;

        final List<Program> visitorPrograms = new ArrayList<Program>();

        SymbolTable.initialize(libs);

        final int maxVisitors;
        if (cl.hasOption('v'))
            maxVisitors = Integer.parseInt(cl.getOptionValue('v'));
        else
            maxVisitors = Integer.MAX_VALUE;

        for (int i = 0; i < inputFiles.size(); i++) {
            final File f = inputFiles.get(i);
            try {
                final BoaLexer lexer = new BoaLexer(new ANTLRFileStream(f.getAbsolutePath()));
                lexer.removeErrorListeners();
                lexer.addErrorListener(new LexerErrorListener());

                final CommonTokenStream tokens = new CommonTokenStream(lexer);
                final BoaParser parser = new BoaParser(tokens);
                parser.removeErrorListeners();
                parser.addErrorListener(new BaseErrorListener() {
                    @Override
                    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
                            int charPositionInLine, String msg, RecognitionException e)
                            throws ParseCancellationException {
                        throw new ParseCancellationException(e);
                    }
                });

                final BoaErrorListener parserErrorListener = new ParserErrorListener();
                final Start p = parse(tokens, parser, parserErrorListener);
                if (cl.hasOption("ast"))
                    new ASTPrintingVisitor().start(p);

                final String jobName = "" + i;

                try {
                    if (!parserErrorListener.hasError) {
                        new TypeCheckingVisitor().start(p, new SymbolTable());

                        final TaskClassifyingVisitor simpleVisitor = new TaskClassifyingVisitor();
                        simpleVisitor.start(p);

                        LOG.info(f.getName() + ": task complexity: "
                                + (!simpleVisitor.isComplex() ? "simple" : "complex"));
                        isSimple &= !simpleVisitor.isComplex();

                        new ShadowTypeEraser().start(p);
                        new InheritedAttributeTransformer().start(p);
                        new LocalAggregationTransformer().start(p);

                        // if a job has no visitor, let it have its own method
                        // also let jobs have own methods if visitor merging is disabled
                        if (!simpleVisitor.isComplex() || maxVisitors < 2 || inputFiles.size() == 1) {
                            new VisitorOptimizingTransformer().start(p);

                            if (cl.hasOption("pp"))
                                new PrettyPrintVisitor().start(p);
                            if (cl.hasOption("ast2"))
                                new ASTPrintingVisitor().start(p);
                            final CodeGeneratingVisitor cg = new CodeGeneratingVisitor(jobName);
                            cg.start(p);
                            jobs.add(cg.getCode());

                            jobnames.add(jobName);
                        }
                        // if a job has visitors, fuse them all together into a single program
                        else {
                            p.getProgram().jobName = jobName;
                            visitorPrograms.add(p.getProgram());
                        }
                    }
                } catch (final TypeCheckException e) {
                    parserErrorListener.error("typecheck", lexer, null, e.n.beginLine, e.n.beginColumn,
                            e.n2.endColumn - e.n.beginColumn + 1, e.getMessage(), e);
                }
            } catch (final Exception e) {
                System.err.print(f.getName() + ": compilation failed: ");
                e.printStackTrace();
            }
        }

        if (!visitorPrograms.isEmpty())
            try {
                for (final Program p : new VisitorMergingTransformer().mergePrograms(visitorPrograms, maxVisitors)) {
                    new VisitorOptimizingTransformer().start(p);

                    if (cl.hasOption("pp"))
                        new PrettyPrintVisitor().start(p);
                    if (cl.hasOption("ast2"))
                        new ASTPrintingVisitor().start(p);
                    final CodeGeneratingVisitor cg = new CodeGeneratingVisitor(p.jobName);
                    cg.start(p);
                    jobs.add(cg.getCode());

                    jobnames.add(p.jobName);
                }
            } catch (final Exception e) {
                System.err.println("error fusing visitors - falling back: " + e);
                e.printStackTrace();

                for (final Program p : visitorPrograms) {
                    new VisitorOptimizingTransformer().start(p);

                    if (cl.hasOption("pp"))
                        new PrettyPrintVisitor().start(p);
                    if (cl.hasOption("ast2"))
                        new ASTPrintingVisitor().start(p);
                    final CodeGeneratingVisitor cg = new CodeGeneratingVisitor(p.jobName);
                    cg.start(p);
                    jobs.add(cg.getCode());

                    jobnames.add(p.jobName);
                }
            }

        if (jobs.size() == 0)
            throw new RuntimeException("no files compiled without error");

        final ST st = AbstractCodeGeneratingVisitor.stg.getInstanceOf("Program");

        st.add("name", className);
        st.add("numreducers", inputFiles.size());
        st.add("jobs", jobs);
        st.add("jobnames", jobnames);
        st.add("combineTables", CodeGeneratingVisitor.combineAggregatorStrings);
        st.add("reduceTables", CodeGeneratingVisitor.reduceAggregatorStrings);
        st.add("splitsize", isSimple ? 64 * 1024 * 1024 : 10 * 1024 * 1024);
        if (DefaultProperties.localDataPath != null) {
            st.add("isLocal", true);
        }

        o.write(st.render().getBytes());
    } finally {
        o.close();
    }

    compileGeneratedSrc(cl, jarName, outputRoot, outputFile);
}
From source file:ffx.Main.java
/**
 * Create an instance of Force Field X.
 *
 * @param args an array of {@link java.lang.String} objects.
 * @throws java.lang.Exception if any.
 */
public static void main(String[] args) throws Exception {
    /**
     * Process any "-D" command line flags.
     */
    args = processProperties(args);
    /**
     * Configure our logging.
     */
    startLogging();
    /**
     * Print out help for the command line interface.
     */
    if (GraphicsEnvironment.isHeadless() && args.length < 2) {
        commandLineInterfaceHelp();
    }
    /**
     * Determine host name and process ID.
     */
    environment();
    /**
     * Start up the Parallel Java communication layer.
     */
    startParallelJava(args);
    /**
     * Run the pKa input GUI if requested. Halts execution until GUI exits.
     */
    /**
     * if (System.getProperty("pKaCalc") != null) { if
     * (System.getProperty("pKaCalc").equals("true")) { ffx.pka.pKaRun
     * runnable = new ffx.pka.pKaRun(); Thread t = new Thread(runnable, "pKa
     * Thread"); t.start(); t.join(); final int NUM_PKA_ARGS = 25; String[]
     * newArgs = new String[NUM_PKA_ARGS]; int currentArg = 0; for (int i = 0;
     * i < newArgs.length; i++) { newArgs[currentArg] = runnable.getArg(i);
     * if (runnable.getArg(i) == null) { String temp = runnable.getArg(i - 1);
     * if (temp.startsWith("-s") || temp.startsWith("-f")) { currentArg--; }
     * } else { currentArg++; } } args = newArgs; } }
     */
    // Print the header.
    // Moved this here so I could see the args being supplied by pKaRun.
    header(args);
    /**
     * Parse the specified command or structure file.
     */
    File commandLineFile = null;
    int nArgs = args.length;
    if (nArgs > 0) {
        commandLineFile = new File(args[0]);
        // Resolve a relative path
        if (commandLineFile.exists()) {
            commandLineFile = new File(FilenameUtils.normalize(commandLineFile.getAbsolutePath()));
        }
    }
    /**
     * Convert the args to a List<String>.
     */
    List<String> argList = new ArrayList<>(nArgs);
    if (nArgs > 1) {
        for (int i = 1; i < nArgs; i++) {
            argList.add(args[i]);
        }
    }
    /**
     * Start up the GUI or CLI version of Force Field X.
     */
    if (!GraphicsEnvironment.isHeadless()) {
        startGraphicalUserInterface(commandLineFile, argList);
    } else {
        startCommandLineInterface(commandLineFile, argList);
    }
}
From source file:com.ericsson.eiffel.remrem.semantics.clone.PrepareLocalEiffelSchemas.java
public static void main(String[] args) throws IOException {
    final PrepareLocalEiffelSchemas prepareLocalSchema = new PrepareLocalEiffelSchemas();
    final Proxy proxy = prepareLocalSchema.getProxy(httpProxyUrl, httpProxyPort, httpProxyUsername,
            httpProxyPassword);
    if (proxy != null) {
        prepareLocalSchema.setProxy(proxy);
    }
    final String eiffelRepoUrl = args[0];
    final String eiffelRepoBranch = args[1];
    final String operationRepoUrl = args[2];
    final String operationRepoBranch = args[3];

    final File localEiffelRepoPath = new File(
            System.getProperty(EiffelConstants.USER_HOME) + File.separator + EiffelConstants.EIFFEL);
    final File localOperationsRepoPath = new File(System.getProperty(EiffelConstants.USER_HOME)
            + File.separator + EiffelConstants.OPERATIONS_REPO_NAME);

    // Clone Eiffel Repo from GitHub
    prepareLocalSchema.cloneEiffelRepo(eiffelRepoUrl, eiffelRepoBranch, localEiffelRepoPath);

    // Clone Eiffel operations Repo from GitHub
    prepareLocalSchema.cloneEiffelRepo(operationRepoUrl, operationRepoBranch, localOperationsRepoPath);

    // Copy operations repo Schemas to location where Eiffel repo schemas available
    prepareLocalSchema.copyOperationSchemas(localOperationsRepoPath.getAbsolutePath(),
            localEiffelRepoPath.getAbsolutePath());

    // Read and Load JsonSchemas from Cloned Directory
    final LocalRepo localRepo = new LocalRepo(localEiffelRepoPath);
    localRepo.readSchemas();
    final ArrayList<String> jsonEventNames = localRepo.getJsonEventNames();
    final ArrayList<File> jsonEventSchemas = localRepo.getJsonEventSchemas();

    // Schema changes
    final SchemaFile schemaFile = new SchemaFile();

    // Iterate the Each jsonSchema file to Add and Modify the necessary properties
    if (jsonEventNames != null && jsonEventSchemas != null) {
        for (int i = 0; i < jsonEventNames.size(); i++) {
            schemaFile.modify(jsonEventSchemas.get(i), jsonEventNames.get(i));
        }
    }
}
From source file:net.itransformers.postDiscoverer.core.ReportManager.java
public static void main(String[] args) throws IOException {
    File projectDir = new File(".");
    File scriptPath = new File("postDiscoverer/src/main/resources/postDiscoverer/conf/groovy/");
    ResourceManagerFactory resourceManagerFactory = new XmlResourceManagerFactory(
            "iDiscover/resourceManager/xmlResourceManager/src/main/resources/xmlResourceManager/conf/xml/resource.xml");
    Map<String, String> resourceManagerParams = new HashMap<>();
    resourceManagerParams.put("projectPath", projectDir.getAbsolutePath());
    ResourceManager resourceManager = resourceManagerFactory.createResourceManager("xml", resourceManagerParams);

    Map<String, String> params = new HashMap<String, String>();
    params.put("protocol", "telnet");
    params.put("deviceName", "R1");
    params.put("deviceType", "CISCO");
    params.put("address", "10.17.1.5");
    params.put("port", "23");
    ResourceType resource = resourceManager.findFirstResourceBy(params);
    List connectParameters = resource.getConnectionParams();
    for (int i = 0; i < connectParameters.size(); i++) {
        ConnectionParamsType connParamsType = (ConnectionParamsType) connectParameters.get(i);
        String connectionType = connParamsType.getConnectionType();
        if (connectionType.equalsIgnoreCase(params.get("protocol"))) {
            for (ParamType param : connParamsType.getParam()) {
                params.put(param.getName(), param.getValue());
            }
        }
    }

    File postDiscoveryConfing = new File(
            projectDir + "/postDiscoverer/src/main/resources/postDiscoverer/conf/xml/reportGenerator.xml");
    if (!postDiscoveryConfing.exists()) {
        System.out.println("File missing: " + postDiscoveryConfing.getAbsolutePath());
        return;
    }
    ReportGeneratorType reportGenerator = null;
    FileInputStream is = new FileInputStream(postDiscoveryConfing);
    try {
        reportGenerator = JaxbMarshalar.unmarshal(ReportGeneratorType.class, is);
    } catch (JAXBException e) {
        logger.info(e); //To change body of catch statement use File | Settings | File Templates.
    } finally {
        is.close();
    }

    ReportManager reportManager = new ReportManager(reportGenerator, scriptPath.getPath(), projectDir,
            "postDiscoverer/conf/xslt/table_creator.xslt");
    StringBuffer report = null;
    HashMap<String, Object> groovyExecutorParams = new HashMap<String, Object>();
    for (String s : params.keySet()) {
        groovyExecutorParams.put(s, params.get(s));
    }
    try {
        report = reportManager.reportExecutor(
                new File("/Users/niau/Projects/Projects/netTransformer10/version1/post-discovery"),
                groovyExecutorParams);
    } catch (ParserConfigurationException e) {
        e.printStackTrace();
    } catch (SAXException e) {
        e.printStackTrace();
    }
    if (report != null) {
        System.out.println(report.toString());
    } else {
        System.out.println("Report generation failed!");
    }
}
From source file:com.kotcrab.vis.editor.Main.java
public static void main(String[] args) throws Exception {
    App.init();
    if (OsUtils.isMac())
        System.setProperty("java.awt.headless", "true");

    LaunchConfiguration launchConfig = new LaunchConfiguration();

    //TODO: needs some better parser
    for (int i = 0; i < args.length; i++) {
        String arg = args[i];
        if (arg.equals("--scale-ui")) {
            launchConfig.scaleUIEnabled = true;
            continue;
        }

        if (arg.equals("--project")) {
            if (i + 1 >= args.length) {
                throw new IllegalStateException("Not enough parameters for --project <project path>");
            }
            launchConfig.projectPath = args[i + 1];
            i++;
            continue;
        }

        if (arg.equals("--scene")) {
            if (i + 1 >= args.length) {
                throw new IllegalStateException("Not enough parameters for --scene <scene path>");
            }
            launchConfig.scenePath = args[i + 1];
            i++;
            continue;
        }

        Log.warn("Unrecognized command line argument: " + arg);
    }
    launchConfig.verify();

    editor = new Editor(launchConfig);

    Lwjgl3ApplicationConfiguration config = new Lwjgl3ApplicationConfiguration();
    config.setWindowedMode(1280, 720);
    config.setWindowSizeLimits(1, 1, 9999, 9999);
    config.useVsync(true);
    config.setIdleFPS(2);
    config.setWindowListener(new Lwjgl3WindowAdapter() {
        @Override
        public boolean closeRequested() {
            editor.requestExit();
            return false;
        }
    });

    try {
        new Lwjgl3Application(editor, config);
        Log.dispose();
    } catch (Exception e) {
        Log.exception(e);
        Log.fatal("Uncaught exception occurred, error report will be saved");
        Log.flush();

        if (App.eventBus != null)
            App.eventBus.post(new ExceptionEvent(e, true));

        try {
            File crashReport = new CrashReporter(Log.getLogFile().file()).processReport();
            if (new File(App.TOOL_CRASH_REPORTER_PATH).exists() == false) {
                Log.warn("Crash reporting tool not present, skipping crash report sending.");
            } else {
                CommandLine cmdLine = new CommandLine(PlatformUtils.getJavaBinPath());
                cmdLine.addArgument("-jar");
                cmdLine.addArgument(App.TOOL_CRASH_REPORTER_PATH);
                cmdLine.addArgument(ApplicationUtils.getRestartCommand().replace("\"", "%"));
                cmdLine.addArgument(crashReport.getAbsolutePath(), false);
                DefaultExecutor executor = new DefaultExecutor();
                executor.setStreamHandler(new PumpStreamHandler(null, null, null));
                executor.execute(cmdLine);
            }
        } catch (IOException ex) {
            ex.printStackTrace();
        }

        Log.dispose();
        System.exit(-3);
    } catch (ExceptionInInitializerError err) {
        if (OsUtils.isMac() && err.getCause() instanceof IllegalStateException) {
            if (ExceptionUtils.getStackTrace(err).contains("XstartOnFirstThread")) {
                System.out.println(
                        "Application was not launched on first thread. Restarting with -XstartOnFirstThread, add VM argument -XstartOnFirstThread to avoid this.");
                ApplicationUtils.startNewInstance();
            }
        }
        throw err;
    }
}
From source file:net.antidot.semantic.rdf.rdb2rdf.main.Db2triples.java
public static void main(String[] args) {
    // Get all options
    Options options = new Options();
    Options r2rmlOptions = new Options();
    Options dmOptions = new Options();
    options.addOption(modeOpt);
    options.addOption(userNameOpt);
    r2rmlOptions.addOption(userNameOpt);
    dmOptions.addOption(userNameOpt);
    options.addOption(passwordOpt);
    r2rmlOptions.addOption(passwordOpt);
    dmOptions.addOption(passwordOpt);
    options.addOption(URLOpt);
    r2rmlOptions.addOption(URLOpt);
    dmOptions.addOption(URLOpt);
    options.addOption(driverOpt);
    r2rmlOptions.addOption(driverOpt);
    dmOptions.addOption(driverOpt);
    options.addOption(dbOpt);
    r2rmlOptions.addOption(dbOpt);
    dmOptions.addOption(dbOpt);
    options.addOption(baseURIOpt);
    r2rmlOptions.addOption(baseURIOpt);
    dmOptions.addOption(baseURIOpt);
    options.addOption(forceOpt);
    r2rmlOptions.addOption(forceOpt);
    dmOptions.addOption(forceOpt);
    options.addOption(nativeOpt);
    r2rmlOptions.addOption(nativeOpt);
    dmOptions.addOption(nativeOpt);
    options.addOption(nativeStoreNameOpt);
    r2rmlOptions.addOption(nativeStoreNameOpt);
    dmOptions.addOption(nativeStoreNameOpt);
    options.addOption(outputOpt);
    r2rmlOptions.addOption(outputOpt);
    dmOptions.addOption(outputOpt);
    options.addOption(transformSPARQLFile);
    dmOptions.addOption(transformSPARQLFile);
    options.addOption(transformOutputFile);
    dmOptions.addOption(transformOutputFile);
    options.addOption(rdfFormat);
    r2rmlOptions.addOption(rdfFormat);
    dmOptions.addOption(rdfFormat);
    options.addOption(versionOpt);
    dmOptions.addOption(versionOpt);
    options.addOption(r2rmlFileOpt);
    r2rmlOptions.addOption(r2rmlFileOpt);

    // Init parameters
    String mode = null;
    String userName = null;
    String password = null;
    String url = null;
    DriverType driver = null;
    String dbName = null;
    String baseURI = null;
    boolean useNativeStore = false;
    boolean forceExistingRep = false;
    String nativeOutput = null;
    String output = null;
    String sparql = null;
    String sparqlOutput = null;
    String format = null;
    String r2rmlFile = null;
    int int_version = 1;

    // RDF Format output
    RDFFormat rdfFormat = RDFFormat.TURTLE; // Turtle by default
    // Norm version
    Version version = Version.WD_20120529;

    // Option parsing
    // Create the parser
    CommandLineParser parser = new GnuParser();
    try {
        // parse the command line arguments
        CommandLine line = parser.parse(options, args);

        // Database settings
        // Mode
        if (!line.hasOption("mode")) {
            // automatically generate the help statement
            log.error("Mode is required. Use -m option to set it.");
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp(projectName, options);
            System.exit(-1);
        } else {
            mode = line.getOptionValue("mode");
            if (!mode.equals("r2rml") && !mode.equals("dm")) {
                log.error("Unkonw mode. Please select 'r2rml' or 'dm' mode.");
                HelpFormatter formatter = new HelpFormatter();
                formatter.printHelp(projectName, options);
                System.exit(-1);
            }
        }

        // user name
        if (!line.hasOption("user")) {
            // automatically generate the help statement
            log.error("User name is required. Use -u option to set it.");
            HelpFormatter formatter = new HelpFormatter();
            if (mode.equals("r2rml")) {
                formatter.printHelp(projectNameR2RMLMode, r2rmlOptions);
            } else {
                formatter.printHelp(projectNameDirectMappingMode, dmOptions);
            }
            System.exit(-1);
        } else {
            userName = line.getOptionValue("user");
        }

        // password
        if (!line.hasOption("pass")) {
            // automatically generate the help statement
            log.error("Password is required. Use -p option to set it.");
            HelpFormatter formatter = new HelpFormatter();
            if (mode.equals("r2rml")) {
                formatter.printHelp(projectNameR2RMLMode, r2rmlOptions);
            } else {
                formatter.printHelp(projectNameDirectMappingMode, dmOptions);
            }
            System.exit(-1);
        } else {
            password = line.getOptionValue("pass");
        }

        // Database URL
        url = line.getOptionValue("url", "jdbc:mysql://localhost/");
        // driver
        driver = new DriverType(line.getOptionValue("driver", defaultDriver.getDriverName()));

        // Database name
        if (!line.hasOption("database")) {
            // automatically generate the help statement
            log.error("Database name is required. Use -b option to set it.");
            HelpFormatter formatter = new HelpFormatter();
            if (mode.equals("r2rml")) {
                formatter.printHelp(projectNameR2RMLMode, r2rmlOptions);
            } else {
                formatter.printHelp(projectNameDirectMappingMode, dmOptions);
            }
            System.exit(-1);
        } else {
            dbName = line.getOptionValue("database");
        }

        // Base URI
        baseURI = line.getOptionValue("base_uri", "http://foo.example/DB/");
        // Use of native store ?
        useNativeStore = line.hasOption("n");
        // Name of native store
        if (useNativeStore && !line.hasOption("native_output")) {
            // automatically generate the help statement
            log.error("Native triplestore path is required. Use -n option to set it.");
            HelpFormatter formatter = new HelpFormatter();
            if (mode.equals("r2rml")) {
                formatter.printHelp(projectNameR2RMLMode, r2rmlOptions);
            } else {
                formatter.printHelp(projectNameDirectMappingMode, dmOptions);
            }
            System.exit(-1);
        } else {
            nativeOutput = line.getOptionValue("native_output");
        }

        // Force loading of repository
        forceExistingRep = line.hasOption("f");
        // Output
        output = line.getOptionValue("output", "output.ttl");

        // SPARQL transformation
        if (line.hasOption("sparql")) {
            if (!mode.equals("dm")) {
                log.warn("sparql option is required only for 'dm' mode : it will be ignored...");
            } else {
                sparql = line.getOptionValue("sparql");
                sparqlOutput = line.getOptionValue("sparql_output", "output_sparql.ttl");
            }
        }

        // RDF Format
        if (line.hasOption("format")) {
            format = line.getOptionValue("format");
            if (format.equals("TURTLE"))
                rdfFormat = RDFFormat.TURTLE;
            else if (format.equals("RDFXML"))
                rdfFormat = RDFFormat.RDFXML;
            else if (format.equals("NTRIPLES"))
                rdfFormat = RDFFormat.NTRIPLES;
            else if (!format.equals("N3")) {
                log.error("Unknown RDF format. Please use RDFXML, TURTLE, N3 or NTRIPLES.");
                HelpFormatter formatter = new HelpFormatter();
                if (mode.equals("r2rml")) {
                    formatter.printHelp(projectNameR2RMLMode, r2rmlOptions);
                } else {
                    formatter.printHelp(projectNameDirectMappingMode, dmOptions);
                }
                System.exit(-1);
            }
        }

        // Norm version
        if (line.hasOption("version")) {
            if (!mode.equals("dm")) {
                log.warn("version option is required only for 'dm' mode : it will be ignored...");
            }
            switch (int_version) {
            case 1:
                version = Version.WD_20120529;
                break;
            case 2:
                version = Version.WD_20110324;
                // Check DB compatibilities
                if (!(driver.equals(DriverType.MysqlDriver) || driver.equals(DriverType.PostgreSQL))) {
                    log.error("Db2triples in Direct Mapping mode does'nt support this driver for the Working Draft"
                            + " of 23 March 2011 (only MySQL and PostGreSQL for this time). "
                            + "You can set the version option to select Working Draft of 20 September 2011.");
                    System.exit(-1);
                }
                break;
            default:
                break;
            }
        }

        // r2rml instance
        if (mode.equals("r2rml")) {
            if (!line.hasOption("r2rml_file")) {
                log.error("R2RML config file is required. Use -r option to set it.");
                // automatically generate the help statement
                HelpFormatter formatter = new HelpFormatter();
                formatter.printHelp(projectNameR2RMLMode, r2rmlOptions);
                System.exit(-1);
            } else {
                r2rmlFile = line.getOptionValue("r2rml_file");
                File r2rmlFileTest = new File(r2rmlFile);
                if (!r2rmlFileTest.exists()) {
                    log.error("R2RML file does not exists.");
                    System.exit(-1);
                }
            }
        }
    } catch (ParseException exp) {
        // oops, something went wrong
        log.error("Parsing failed. Reason : " + exp.getMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(projectName, options);
        System.exit(-1);
    }

    // Open test database
    Connection conn = null;
    try {
        // Connect database
        conn = SQLConnector.connect(userName, password, url + dbName, driver);
        // Generate RDF graph
        SesameDataSet g = null;
        // Check nature of storage (memory by default)
        if (useNativeStore) {
            File pathToNativeOutputDir = new File(nativeOutput);
            if (pathToNativeOutputDir.exists() && !forceExistingRep) {
                log.error("Directory " + pathToNativeOutputDir
                        + " already exists. Use -f option to force loading of existing repository.");
                System.exit(-1);
            }
            // Extract database model according to convert mode
            if (mode.equals("r2rml")) {
                g = R2RMLProcessor.convertDatabase(conn, r2rmlFile, baseURI, nativeOutput, driver);
            } else {
                g = DirectMapper.generateDirectMapping(conn, version, driver, baseURI, null, nativeOutput);
            }
        } else {
            File outputFile = new File(output);
            if (outputFile.exists() && !forceExistingRep) {
                log.error("Output file " + outputFile.getAbsolutePath()
                        + " already exists. Please remove it or modify ouput name option.");
                System.exit(-1);
            }
            // Extract database model
            if (mode.equals("r2rml")) {
                g = R2RMLProcessor.convertDatabase(conn, r2rmlFile, baseURI, driver);
            } else {
                g = DirectMapper.generateDirectMapping(conn, version, driver, baseURI, null, null);
            }
            // Dump graph
            log.info("Serialize RDF graph...");
            g.dumpRDF(output, rdfFormat);
            log.info("RDF graph serialized into " + outputFile.getAbsolutePath());
        }
        if (sparql != null && mode.equals("dm")) {
            log.info("Execute SPARQL transformation...");
            Long start = System.currentTimeMillis();
            String result = g.runSPARQLFromFile(sparql, rdfFormat);
            SesameDataSet gResult = new SesameDataSet();
            gResult.addString(result, rdfFormat);
            gResult.dumpRDF(sparqlOutput, rdfFormat);
            Float stop = Float.valueOf(System.currentTimeMillis() - start) / 1000;
            log.info("Direct Mapping SPARQL query executed in " + stop + " seconds.");
            log.info("[DirectMapping:main] Number of triples after transformation : " + gResult.getSize());
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        try {
            // Close db connection
            conn.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}
From source file:au.org.ala.names.search.DwcaNameIndexer.java
/**
 * Example run
 *
 * java cp .:names.jar au.org.ala.checklist.lucene.DwcaNameIndexer
 * -all
 * -dwca /data/bie-staging/names-lists/dwca-col
 * -target /data/lucene/testdwc-namematching
 * -irmng /data/bie-staging/irmng/IRMNG_DWC_HOMONYMS
 * -common /data/bie-staging/ala-names/col_vernacular.txt
 *
 * @param args
 */
public static void main(String[] args) {
    final String DEFAULT_DWCA = "/data/lucene/sources/dwca-col";
    final String DEFAULT_IRMNG = "/data/lucene/sources/IRMNG_DWC_HOMONYMS";
    final String DEFAULT_COMMON_NAME = "/data/lucene/sources/col_vernacular.txt";
    final String DEFAULT_TARGET_DIR = "/data/lucene/namematching";
    final String DEFAULT_TMP_DIR = "/data/lucene/nmload-tmp";

    Options options = new Options();
    options.addOption("v", "version", false, "Retrieve version information");
    options.addOption("h", "help", false, "Retrieve options");
    options.addOption("all", false, "Generates the load index and search index");
    options.addOption("load", false, "Generate the load index only. "
            + "The load index is a temporary index generated from the raw data files"
            + " used to load the main search index");
    options.addOption("search", false,
            "Generates the search index. A load index must already be created for this to run.");
    options.addOption("irmng", true,
            "The absolute path to the unzipped irmng DwCA. IRMNG is used to detect homonyms. Defaults to "
                    + DEFAULT_IRMNG);
    options.addOption("dwca", true,
            "The absolute path to the unzipped DwCA for the scientific names. Defaults to " + DEFAULT_DWCA);
    options.addOption("target", true,
            "The target directory to write the new name index to. Defaults to " + DEFAULT_TARGET_DIR);
    options.addOption("tmp", true, "The tmp directory for the load index. Defaults to " + DEFAULT_TMP_DIR);
    options.addOption("common", true, "The common (vernacular) name file. Defaults to " + DEFAULT_COMMON_NAME);
    options.addOption("testSearch", true,
            "Debug a name search. This uses the target directory to search against.");

    CommandLineParser parser = new BasicParser();
    try {
        // parse the command line arguments
        CommandLine line = parser.parse(options, args);
        if (line.hasOption("v")) {
            //only load the properties file if it exists otherwise default to the biocache-test-config.properties on the classpath
            InputStream stream = DwcaNameIndexer.class.getResourceAsStream("/git.properties");
            Properties properties = new Properties();
            if (stream != null) {
                properties.load(stream);
                properties.list(System.out);
            } else {
                System.err.println("Unable to retrieve versioning information");
            }
            System.exit(-1);
        }
        if (line.hasOption("help")) {
            //only load the properties file if it exists otherwise default to the biocache-test-config.properties on the classpath
            new HelpFormatter().printHelp("nameindexer", options);
            System.exit(-1);
        }
        if (line.hasOption("testSearch")) {
            boolean indexExists = (new File(DEFAULT_TARGET_DIR).exists());
            if (indexExists) {
                //do a name search - with option flag pointing to index location
                System.out.println("Search for name");
                ALANameSearcher searcher = new ALANameSearcher(line.getOptionValue("target", DEFAULT_TARGET_DIR));
                NameSearchResult nsr = searcher.searchForRecord(line.getOptionValue("testSearch"));
                if (nsr != null) {
                    Map<String, String> props = nsr.toMap();
                    for (Map.Entry<String, String> entry : props.entrySet()) {
                        System.out.println(entry.getKey() + ": " + entry.getValue());
                    }
                } else {
                    System.err.println("No match for " + line.getOptionValue("testSearch"));
                }
            } else {
                System.err.println("Index unreadable. Check " + DEFAULT_TARGET_DIR);
            }
            System.exit(-1);
        }

        boolean load = line.hasOption("load") || line.hasOption("all");
        boolean search = line.hasOption("search") || line.hasOption("all");
        if (!line.hasOption("load") && !line.hasOption("search") && !line.hasOption("all")) {
            load = true;
            search = true;
        }
        log.info("Generating loading index: " + load);
        log.info("Generating searching index: " + search);

        boolean defaultIrmngReadable = (new File(DEFAULT_IRMNG).exists());
        boolean defaultCommonReadable = (new File(DEFAULT_COMMON_NAME).exists());
        boolean defaultDwcaReadable = (new File(DEFAULT_DWCA).exists());

        if (line.getOptionValue("dwca") != null) {
            log.info("Using the DwCA name file: " + line.getOptionValue("dwca"));
        } else if (defaultDwcaReadable) {
            log.info("Using the default DwCA name file: " + DEFAULT_DWCA);
        } else {
            log.error("No DwC Archive specified and the default file path does not exist or is inaccessible. Default path: "
                    + DEFAULT_DWCA);
            System.exit(-1);
        }
        if (line.getOptionValue("irmng") == null && !defaultIrmngReadable) {
            log.warn("No IRMNG export specified and the default file path does not exist or is inaccessible. Default path: "
                    + DEFAULT_IRMNG);
        } else {
            log.info("Using the default IRMNG name file: " + DEFAULT_IRMNG);
        }
        if (line.getOptionValue("common") == null && !defaultCommonReadable) {
            log.warn("No common name export specified and the default file path does not exist or is inaccessible. Default path: "
                    + DEFAULT_COMMON_NAME);
        } else {
            log.info("Using the default common name file: " + DEFAULT_COMMON_NAME);
        }

        File targetDirectory = new File(line.getOptionValue("target", DEFAULT_TARGET_DIR));
        if (targetDirectory.exists()) {
            String newPath = targetDirectory.getAbsolutePath() + "_"
                    + DateFormatUtils.format(new Date(), "yyyy-MM-dd_hh-mm-ss");
            log.info("Target directory already exists. Backing up to : " + newPath);
            File newTargetDirectory = new File(newPath);
            FileUtils.moveDirectory(targetDirectory, newTargetDirectory);
            FileUtils.forceMkdir(targetDirectory);
        }

        DwcaNameIndexer indexer = new DwcaNameIndexer();
        indexer.create(load, search, line.getOptionValue("target", DEFAULT_TARGET_DIR),
                line.getOptionValue("tmp", DEFAULT_TMP_DIR), line.getOptionValue("dwca", DEFAULT_DWCA),
                line.getOptionValue("irmng", DEFAULT_IRMNG), line.getOptionValue("common", DEFAULT_COMMON_NAME));
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:io.apicurio.studio.tools.release.ReleaseTool.java
/**
 * Main method.
 * @param args
 */
public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption("n", "release-name", true, "The name of the new release.");
    options.addOption("p", "prerelease", false, "Indicate that this is a pre-release.");
    options.addOption("t", "release-tag", true, "The tag name of the new release.");
    options.addOption("o", "previous-tag", true, "The tag name of the previous release.");
    options.addOption("g", "github-pat", true, "The GitHub PAT (for authentication/authorization).");
    options.addOption("a", "artifact", true, "The binary release artifact (full path).");
    options.addOption("d", "output-directory", true, "Where to store output file(s).");

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = parser.parse(options, args);

    if (!cmd.hasOption("n") || !cmd.hasOption("t") || !cmd.hasOption("o") || !cmd.hasOption("g")
            || !cmd.hasOption("a")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("release-studio", options);
        System.exit(1);
    }

    // Arguments (command line)
    String releaseName = cmd.getOptionValue("n");
    boolean isPrerelease = cmd.hasOption("p");
    String releaseTag = cmd.getOptionValue("t");
    String oldReleaseTag = cmd.getOptionValue("o");
    String githubPAT = cmd.getOptionValue("g");
    String artifact = cmd.getOptionValue("a");
    File outputDir = new File("");
    if (cmd.hasOption("d")) {
        outputDir = new File(cmd.getOptionValue("d"));
        if (!outputDir.exists()) {
            outputDir.mkdirs();
        }
    }

    File releaseArtifactFile = new File(artifact);
    File releaseArtifactSigFile = new File(artifact + ".asc");

    String releaseArtifact = releaseArtifactFile.getName();
    String releaseArtifactSig = releaseArtifactSigFile.getName();

    if (!releaseArtifactFile.isFile()) {
        System.err.println("Missing file: " + releaseArtifactFile.getAbsolutePath());
        System.exit(1);
    }
    if (!releaseArtifactSigFile.isFile()) {
        System.err.println("Missing file: " + releaseArtifactSigFile.getAbsolutePath());
        System.exit(1);
    }

    System.out.println("=========================================");
    System.out.println("Creating Release: " + releaseTag);
    System.out.println("Previous Release: " + oldReleaseTag);
    System.out.println(" Name: " + releaseName);
    System.out.println(" Artifact: " + releaseArtifact);
    System.out.println(" Pre-Release: " + isPrerelease);
    System.out.println("=========================================");

    String releaseNotes = "";

    // Step #1 - Generate Release Notes
    //   * Grab info about the previous release (extract publish date)
    //   * Query all Issues for ones closed since that date
    //   * Generate Release Notes from the resulting Issues
    try {
        System.out.println("Getting info about release " + oldReleaseTag);
        HttpResponse<JsonNode> response = Unirest
                .get("https://api.github.com/repos/apicurio/apicurio-studio/releases/tags/v" + oldReleaseTag)
                .header("Accept", "application/json").header("Authorization", "token " + githubPAT).asJson();
        if (response.getStatus() != 200) {
            throw new Exception("Failed to get old release info: " + response.getStatusText());
        }
        JsonNode body = response.getBody();
        String publishedDate = body.getObject().getString("published_at");
        if (publishedDate == null) {
            throw new Exception("Could not find Published Date for previous release " + oldReleaseTag);
        }
        System.out.println("Release " + oldReleaseTag + " was published on " + publishedDate);

        List<JSONObject> issues = getIssuesForRelease(publishedDate, githubPAT);
        System.out.println("Found " + issues.size() + " issues closed in release " + releaseTag);

        System.out.println("Generating Release Notes");
        releaseNotes = generateReleaseNotes(releaseName, releaseTag, issues);
        System.out.println("------------ Release Notes --------------");
        System.out.println(releaseNotes);
        System.out.println("-----------------------------------------");
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }

    String assetUploadUrl = null;

    // Step #2 - Create a GitHub Release
    try {
        System.out.println("\nCreating GitHub Release " + releaseTag);
        JSONObject body = new JSONObject();
        body.put("tag_name", "v" + releaseTag);
        body.put("name", releaseName);
        body.put("body", releaseNotes);
        body.put("prerelease", isPrerelease);

        HttpResponse<JsonNode> response = Unirest
                .post("https://api.github.com/repos/apicurio/apicurio-studio/releases")
                .header("Accept", "application/json").header("Content-Type", "application/json")
                .header("Authorization", "token " + githubPAT).body(body).asJson();
        if (response.getStatus() != 201) {
            throw new Exception("Failed to create release in GitHub: " + response.getStatusText());
        }

        assetUploadUrl = response.getBody().getObject().getString("upload_url");
        if (assetUploadUrl == null || assetUploadUrl.trim().isEmpty()) {
            throw new Exception("Failed to get Asset Upload URL for newly created release!");
        }
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }

    // Step #3 - Upload Release Artifact (zip file)
    System.out.println("\nUploading Quickstart Artifact: " + releaseArtifact);
    try {
        String artifactUploadUrl = createUploadUrl(assetUploadUrl, releaseArtifact);
        byte[] artifactData = loadArtifactData(releaseArtifactFile);
        System.out.println("Uploading artifact asset: " + artifactUploadUrl);

        HttpResponse<JsonNode> response = Unirest.post(artifactUploadUrl).header("Accept", "application/json")
                .header("Content-Type", "application/zip").header("Authorization", "token " + githubPAT)
                .body(artifactData).asJson();
        if (response.getStatus() != 201) {
            throw new Exception("Failed to upload asset: " + releaseArtifact,
                    new Exception(response.getStatus() + "::" + response.getStatusText()));
        }

        Thread.sleep(1000);

        artifactUploadUrl = createUploadUrl(assetUploadUrl, releaseArtifactSig);
        artifactData = loadArtifactData(releaseArtifactSigFile);
        System.out.println("Uploading artifact asset: " + artifactUploadUrl);

        response = Unirest.post(artifactUploadUrl).header("Accept", "application/json")
                .header("Content-Type", "text/plain").header("Authorization", "token " + githubPAT)
                .body(artifactData).asJson();
        if (response.getStatus() != 201) {
            throw new Exception("Failed to upload asset: " + releaseArtifactSig,
                    new Exception(response.getStatus() + "::" + response.getStatusText()));
        }
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }

    Thread.sleep(1000);

    // Step #4 - Download Latest Release JSON for inclusion in the project web site
    try {
        System.out.println("Getting info about the release.");
        HttpResponse<JsonNode> response = Unirest
                .get("https://api.github.com/repos/apicurio/apicurio-studio/releases/latest")
                .header("Accept", "application/json").asJson();
        if (response.getStatus() != 200) {
            throw new Exception("Failed to get release info: " + response.getStatusText());
        }
        JsonNode body = response.getBody();
        String publishedDate = body.getObject().getString("published_at");
        if (publishedDate == null) {
            throw new Exception("Could not find Published Date for release.");
        }
        String fname = publishedDate.replace(':', '-');
        File outFile = new File(outputDir, fname + ".json");

        System.out.println("Writing latest release info to: " + outFile.getAbsolutePath());

        String output = body.getObject().toString(4);
        try (FileOutputStream fos = new FileOutputStream(outFile)) {
            fos.write(output.getBytes("UTF-8"));
            fos.flush();
        }
        System.out.println("Release info successfully written.");
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }

    System.out.println("=========================================");
    System.out.println("All Done!");
    System.out.println("=========================================");
}
From source file:at.tuwien.ifs.feature.evaluation.SimilarityRetrievalWriter.java
public static void main(String[] args) throws SOMToolboxException, IOException {
    // register and parse all options
    JSAPResult config = OptionFactory.parseResults(args, OPTIONS);
    File inputVectorFile = config.getFile("inputVectorFile");
    String outputDirStr = AbstractOptionFactory.getFilePath(config, "outputDirectory");
    File outputDirBase = new File(outputDirStr);
    outputDirBase.mkdirs();
    String metricName = config.getString("metric");
    DistanceMetric metric = AbstractMetric.instantiateNice(metricName);
    int neighbours = config.getInt("numberNeighbours");
    int startIndex = config.getInt("startIndex");
    int numberItems = config.getInt("numberItems", -1);
    try {
        SOMLibSparseInputData data = new SOMLibSparseInputData(inputVectorFile.getAbsolutePath());
        int endIndex = data.numVectors();
        if (numberItems != -1) {
            if (startIndex + numberItems > endIndex) {
                System.out.println("Specified number of items (" + numberItems + ") exceeds maximum ("
                        + data.numVectors() + "), limiting to " + (endIndex - startIndex) + ".");
            } else {
                endIndex = startIndex + numberItems;
            }
        }
        StdErrProgressWriter progress = new StdErrProgressWriter(endIndex - startIndex, "processing vector ");
        // SortedSet<InputDistance> distances;
        for (int inputDatumIndex = startIndex; inputDatumIndex < endIndex; inputDatumIndex++) {
            InputDatum inputDatum = data.getInputDatum(inputDatumIndex);
            String inputLabel = inputDatum.getLabel();
            if (inputDatumIndex == -1) {
                throw new IllegalArgumentException("Input with label '" + inputLabel
                        + "' not found in vector file '" + inputVectorFile + "'; possible labels are: "
                        + StringUtils.toString(data.getLabels(), 15));
            }
            File outputDir = new File(outputDirBase,
                    inputLabel.charAt(2) + "/" + inputLabel.charAt(3) + "/" + inputLabel.charAt(4));
            outputDir.mkdirs();
            File outputFile = new File(outputDir, inputLabel + ".txt");
            boolean fileExistsAndValid = false;
            if (outputFile.exists()) {
                // check if it the valid data
                String linesInvalid = "";
                int validLineCount = 0;
                ArrayList<String> lines = FileUtils.readLinesAsList(outputFile.getAbsolutePath());
                for (String string : lines) {
                    if (string.trim().length() == 0) {
                        continue;
                    }
                    String[] parts = string.split("\t");
                    if (parts.length != 2) {
                        linesInvalid += "Line '" + string + "' invalid - contains " + parts.length + " elements.\n";
                    } else if (!NumberUtils.isNumber(parts[1])) {
                        linesInvalid = "Line '" + string + "' invalid - 2nd part is not a number.\n";
                    } else {
                        validLineCount++;
                    }
                }
                if (validLineCount != neighbours) {
                    linesInvalid = "Not enough valid lines; expected " + neighbours + ", found " + validLineCount + ".\n";
                }
                fileExistsAndValid = true;
                if (org.apache.commons.lang.StringUtils.isNotBlank(linesInvalid)) {
                    System.out.println("File " + outputFile.getAbsolutePath() + " exists, but is not valid:\n"
                            + linesInvalid);
                }
            }
            if (fileExistsAndValid) {
                Logger.getLogger("at.tuwien.ifs.feature.evaluation").finer(
                        "File " + outputFile.getAbsolutePath() + " exists and is valid; not recomputing");
            } else {
                PrintWriter p = new PrintWriter(outputFile);
                SmallestElementSet<InputDistance> distances = data.getNearestDistances(inputDatumIndex,
                        neighbours, metric);
                for (InputDistance inputDistance : distances) {
                    p.println(inputDistance.getInput().getLabel() + "\t" + inputDistance.getDistance());
                }
                p.close();
            }
            progress.progress();
        }
    } catch (IllegalArgumentException e) {
        System.out.println(e.getMessage() + ". Aborting.");
        System.exit(-1);
    }
}
From source file:com.alexoree.jenkins.Main.java
public static void main(String[] args) throws Exception {
    // create Options object
    Options options = new Options();
    options.addOption("t", false, "throttle the downloads, waits 5 seconds in between each d/l");
    // automatically generate the help statement
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("jenkins-sync", options);
    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = parser.parse(options, args);
    boolean throttle = cmd.hasOption("t");

    String plugins = "https://updates.jenkins-ci.org/latest/";
    List<String> ps = new ArrayList<String>();
    Document doc = Jsoup.connect(plugins).get();
    for (Element file : doc.select("td a")) {
        //System.out.println(file.attr("href"));
        if (file.attr("href").endsWith(".hpi") || file.attr("href").endsWith(".war")) {
            ps.add(file.attr("href"));
        }
    }

    File root = new File(".");
    //https://updates.jenkins-ci.org/latest/AdaptivePlugin.hpi
    new File("./latest").mkdirs();

    //output zip file
    String zipFile = "jenkinsSync.zip";
    // create byte buffer
    byte[] buffer = new byte[1024];
    FileOutputStream fos = new FileOutputStream(zipFile);
    ZipOutputStream zos = new ZipOutputStream(fos);

    //download the plugins
    for (int i = 0; i < ps.size(); i++) {
        System.out.println("[" + i + "/" + ps.size() + "] downloading " + plugins + ps.get(i));
        String outputFile = download(root.getAbsolutePath() + "/latest/" + ps.get(i), plugins + ps.get(i));

        FileInputStream fis = new FileInputStream(outputFile);
        // begin writing a new ZIP entry, positions the stream to the start of the entry data
        zos.putNextEntry(new ZipEntry(outputFile.replace(root.getAbsolutePath(), "")
                .replace("updates.jenkins-ci.org/", "").replace("https:/", "")));
        int length;
        while ((length = fis.read(buffer)) > 0) {
            zos.write(buffer, 0, length);
        }
        zos.closeEntry();
        fis.close();
        if (throttle)
            Thread.sleep(WAIT);
        new File(root.getAbsolutePath() + "/latest/" + ps.get(i)).deleteOnExit();
    }

    //download the json metadata
    plugins = "https://updates.jenkins-ci.org/";
    ps = new ArrayList<String>();
    doc = Jsoup.connect(plugins).get();
    for (Element file : doc.select("td a")) {
        //System.out.println(file.attr("href"));
        if (file.attr("href").endsWith(".json")) {
            ps.add(file.attr("href"));
        }
    }
    for (int i = 0; i < ps.size(); i++) {
        download(root.getAbsolutePath() + "/" + ps.get(i), plugins + ps.get(i));

        FileInputStream fis = new FileInputStream(root.getAbsolutePath() + "/" + ps.get(i));
        // begin writing a new ZIP entry, positions the stream to the start of the entry data
        zos.putNextEntry(new ZipEntry(plugins + ps.get(i)));
        int length;
        while ((length = fis.read(buffer)) > 0) {
            zos.write(buffer, 0, length);
        }
        zos.closeEntry();
        fis.close();
        new File(root.getAbsolutePath() + "/" + ps.get(i)).deleteOnExit();
        if (throttle)
            Thread.sleep(WAIT);
    }

    // close the ZipOutputStream
    zos.close();
}