List of usage examples for java.lang.StringBuffer.toString()
@Override @HotSpotIntrinsicCandidate public synchronized String toString()
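Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed source files; the class name is illustrative) of what toString() does: it returns a String containing the buffer's contents at the moment of the call, and later changes to the buffer do not affect the returned String.

public class StringBufferToStringDemo {
    public static void main(String[] args) {
        StringBuffer sb = new StringBuffer();
        sb.append("Hello").append(", ").append("world");
        // Snapshot of the current contents.
        String snapshot = sb.toString();
        sb.append("!");
        System.out.println(snapshot);      // Hello, world
        System.out.println(sb.toString()); // Hello, world!
    }
}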
From source file:grnet.filter.XMLFiltering.java
public static void main(String[] args) throws IOException {
    // TODO Auto-generated method stub
    Enviroment enviroment = new Enviroment(args[0]);
    if (enviroment.envCreation) {
        Core core = new Core();
        XMLSource source = new XMLSource(args[0]);
        File sourceFile = source.getSource();
        if (sourceFile.exists()) {
            Collection<File> xmls = source.getXMLs();
            System.out.println("Filtering repository:" + enviroment.dataProviderFilteredIn.getName());
            System.out.println("Number of files to filter:" + xmls.size());
            Iterator<File> iterator = xmls.iterator();
            FilteringReport report = null;
            if (enviroment.getArguments().getProps().getProperty(Constants.createReport)
                    .equalsIgnoreCase("true")) {
                report = new FilteringReport(enviroment.getArguments().getDestFolderLocation(),
                        enviroment.getDataProviderFilteredIn().getName());
            }
            ConnectionFactory factory = new ConnectionFactory();
            factory.setHost(enviroment.getArguments().getQueueHost());
            factory.setUsername(enviroment.getArguments().getQueueUserName());
            factory.setPassword(enviroment.getArguments().getQueuePassword());
            while (iterator.hasNext()) {
                StringBuffer logString = new StringBuffer();
                logString.append(enviroment.dataProviderFilteredIn.getName());
                File xmlFile = iterator.next();
                String name = xmlFile.getName();
                name = name.substring(0, name.indexOf(".xml"));
                logString.append(" " + name);
                boolean xmlIsFilteredIn = core.filterXML(xmlFile, enviroment.getArguments().getQueries());
                if (xmlIsFilteredIn) {
                    logString.append(" " + "FilteredIn");
                    slf4jLogger.info(logString.toString());
                    Connection connection = factory.newConnection();
                    Channel channel = connection.createChannel();
                    channel.queueDeclare(QUEUE_NAME, false, false, false, null);
                    channel.basicPublish("", QUEUE_NAME, null, logString.toString().getBytes());
                    channel.close();
                    connection.close();
                    try {
                        if (report != null) {
                            report.appendXMLFileNameNStatus(xmlFile.getPath(), Constants.filteredInData);
                            report.raiseFilteredInFilesNum();
                        }
                        FileUtils.copyFileToDirectory(xmlFile, enviroment.getDataProviderFilteredIn());
                    } catch (IOException e) {
                        e.printStackTrace();
                        System.out.println("Filtering failed.");
                    }
                } else {
                    logString.append(" " + "FilteredOut");
                    slf4jLogger.info(logString.toString());
                    Connection connection = factory.newConnection();
                    Channel channel = connection.createChannel();
                    channel.queueDeclare(QUEUE_NAME, false, false, false, null);
                    channel.basicPublish("", QUEUE_NAME, null, logString.toString().getBytes());
                    channel.close();
                    connection.close();
                    try {
                        if (report != null) {
                            report.appendXMLFileNameNStatus(xmlFile.getPath(), Constants.filteredOutData);
                            report.raiseFilteredOutFilesNum();
                        }
                        FileUtils.copyFileToDirectory(xmlFile, enviroment.getDataProviderFilteredOuT());
                    } catch (IOException e) {
                        e.printStackTrace();
                        System.out.println("Filtering failed.");
                    }
                }
            }
            if (report != null) {
                report.appendXPathExpression(enviroment.getArguments().getQueries());
                report.appendGeneralInfo();
            }
            System.out.println("Filtering is done.");
        }
    }
}
From source file:Main.java
public static void main(String[] args) {
    String regex = "\\b\\d+\\b";
    StringBuffer sb = new StringBuffer();
    String replacementText = "";
    String matchedText = "";
    String text = "We have 7 tutorials for Java, 2 tutorials for Javascript and 1 tutorial for Oracle.";

    Pattern p = Pattern.compile(regex);
    Matcher m = p.matcher(text);
    while (m.find()) {
        matchedText = m.group();
        int num = Integer.parseInt(matchedText);
        if (num == 1) {
            replacementText = "only one";
        } else if (num < 5) {
            replacementText = "a few";
        } else {
            replacementText = "many";
        }
        m.appendReplacement(sb, replacementText);
    }
    m.appendTail(sb);

    System.out.println("Old Text: " + text);
    System.out.println("New Text: " + sb.toString());
}
From source file:com.curiousby.baoyou.cn.hadoop.HDFSUtils.java
public static void main(String[] args) throws IOException {
    String hdfsRoot = "/user/hadoop/storm";
    // System.setProperty("hadoop.home.dir", "I:\\software\\hadoop-2.6.0");
    HDFSUtils hdfs = new HDFSUtils();
    hdfs.init();

    System.out.println(hdfs.dir("/"));
    System.out.println("================1==============");
    String terminalPath = hdfsRoot + "/" + "terminal_" + "108999000003";
    boolean existsTerminalPath = hdfs.exists(terminalPath);
    if (!existsTerminalPath) {
        hdfs.createDir(terminalPath);
        hdfs.close();
        hdfs.init();
    }
    System.out.println("================2==============");
    String terminalTodayPath = terminalPath + "/temialinfo." + DatetimeUtil.getYMD(new Date()) + ".log";
    boolean existsTerminalTodayPath = hdfs.exists(terminalTodayPath);
    if (!existsTerminalTodayPath) {
        hdfs.create(terminalTodayPath);
    }
    System.out.println("================3===============");
    StringBuffer sb = new StringBuffer();
    sb.append(
            "108999000003,2016-12-27 17:20:05,863360028147399,0c3631303038393039393030303030303030333030,460010430511343,MI 3W,Xiaomi,MMSHANGCHENG,108999000003-Android,unknown,2.0,2,Android,Android,unknown,unknown,8,1920*1080,Wi-Fi,unknown,unknown,ARMv7 Processor rev 1 (v7l)");
    System.out.println("================4===============");
    hdfs.writeAppendFile(terminalTodayPath, sb.toString());
    System.out.println("================5===============");
    hdfs.close();
}
From source file:com.welocalize.dispatcherMW.client.Main.java
public static void main(String[] args) throws InterruptedException, IOException {
    if (args.length >= 3) {
        String type = args[0];
        if (TYPE_TRANSLATE.equalsIgnoreCase(type)) {
            setbasicURl(args[1]);
            doJob(args[2], args[3]);
            return;
        } else if (TYPE_CHECK_STATUS.equalsIgnoreCase(type)) {
            setbasicURl(args[1]);
            checkJobStaus(args[2]);
            return;
        } else if (TYPE_DOWNLOAD.equalsIgnoreCase(type)) {
            setbasicURl(args[1]);
            downloadJob(args[2], args[3]);
            return;
        }
    } else if (args.length == 1) {
        Properties properties = new Properties();
        properties.load(new FileInputStream(args[0]));
        String type = properties.getProperty("type");
        setbasicURl(properties.getProperty("URL"));
        String securityCode = properties.getProperty(JSONPN_SECURITY_CODE);
        String filePath = properties.getProperty("filePath");
        String jobID = properties.getProperty(JSONPN_JOBID);
        if (TYPE_TRANSLATE.equalsIgnoreCase(type)) {
            doJob(securityCode, filePath);
            return;
        } else if (TYPE_CHECK_STATUS.equalsIgnoreCase(type)) {
            String status = checkJobStaus(jobID);
            System.out.println("The Status of Job:" + jobID + " is " + status + ". ");
            return;
        } else if (TYPE_DOWNLOAD.equalsIgnoreCase(type)) {
            downloadJob(jobID, securityCode);
            System.out.println("Download Job:" + jobID);
            return;
        }
    }

    // Print Help Message
    StringBuffer msg = new StringBuffer();
    msg.append("The Input is incorrect.").append("\n");
    msg.append("If you want to translate the XLF file, use this command:").append("\n");
    msg.append(" translate $URL $securityCode $filePath").append("\n");
    msg.append("If you only want to check job status, use this command:").append("\n");
    msg.append(" checkStatus $URL $jobID").append("\n");
    msg.append("If you only want to download the job file, use this command:").append("\n");
    msg.append(" download $URL $jobID $securityCode").append("\n");
    System.out.println(msg.toString());
}
From source file:discovery.compression.kdd2011.ratio.RatioCompressionReport.java
public static void main(String[] args) throws GraphReadingException, IOException, java.text.ParseException {
    opts.addOption("r", true, "Goal compression ratio");
    // opts.addOption("a", true,
    //         "Algorithm used for compression. The default and only currently available option is \"greedy\"");
    // opts.addOption("cost-output", true, "Output file for costs, default is costs.txt");
    // opts.addOption("cost-format", true, "Output format for ");
    opts.addOption("ctype", true, "Connectivity type: global or local, default is global.");
    opts.addOption("connectivity", false,
            "enables output for connectivity. Connectivity info will be written to connectivity.txt");
    opts.addOption("output_bmg", true, "Write bmg file with groups to given file.");
    opts.addOption("algorithm", true, "Algorithm to use, one of: greedy random1 random2 bruteforce slowgreedy");
    opts.addOption("hop2", false, "Only try to merge nodes that have common neighbors");
    opts.addOption("kmedoids", false, "Enables output for kmedoids clustering");
    opts.addOption("kmedoids_k", true, "Number of clusters to be used in kmedoids. Default is 3");
    opts.addOption("kmedoids_output", true,
            "Output file for kmedoid clusters. Default is clusters.txt. This file will be overwritten.");
    opts.addOption("norefresh", false, "Use old style merging: all connectivities are not refreshed when merging");
    opts.addOption("edge_attribute", true, "Attribute from bmgraph used as edge weight");
    opts.addOption("only_times", false, "Only write times.txt");
    // opts.addOption("no_metrics", false,
    //         "Exit after compression, don't calculate any metrics or produce output bmg for the compression.");

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(opts, args);
    } catch (ParseException e) {
        e.printStackTrace();
        System.exit(0);
    }

    boolean connectivity = false;
    double ratio = 0;
    boolean hop2 = cmd.hasOption("hop2");

    RatioCompression compression = new GreedyRatioCompression(hop2);

    if (cmd.hasOption("connectivity"))
        connectivity = true;

    ConnectivityType ctype = ConnectivityType.GLOBAL;
    CompressionMergeModel mergeModel = new PathAverageMergeModel();
    if (cmd.hasOption("ctype")) {
        String ctypeStr = cmd.getOptionValue("ctype");
        if (ctypeStr.equals("local")) {
            ctype = ConnectivityType.LOCAL;
            mergeModel = new EdgeAverageMergeModel();
        } else if (ctypeStr.equals("global")) {
            ctype = ConnectivityType.GLOBAL;
            mergeModel = new PathAverageMergeModel();
        } else {
            System.out.println(PROGRAM_NAME + ": unknown connectivity type " + ctypeStr);
            printHelp();
        }
    }

    if (cmd.hasOption("norefresh"))
        mergeModel = new PathAverageMergeModelNorefresh();

    if (cmd.hasOption("algorithm")) {
        String alg = cmd.getOptionValue("algorithm");
        if (alg.equals("greedy")) {
            compression = new GreedyRatioCompression(hop2);
        } else if (alg.equals("random1")) {
            compression = new RandomRatioCompression(hop2);
        } else if (alg.equals("random2")) {
            compression = new SmartRandomRatioCompression(hop2);
        } else if (alg.equals("bruteforce")) {
            compression = new BruteForceCompression(hop2, ctype == ConnectivityType.LOCAL);
        } else if (alg.equals("slowgreedy")) {
            compression = new SlowGreedyRatioCompression(hop2);
        } else {
            System.out.println("algorithm must be one of: greedy random1 random2 bruteforce slowgreedy");
            printHelp();
        }
    }
    compression.setMergeModel(mergeModel);

    if (cmd.hasOption("r")) {
        ratio = Double.parseDouble(cmd.getOptionValue("r"));
    } else {
        System.out.println(PROGRAM_NAME + ": compression ratio not defined");
        printHelp();
    }

    if (cmd.hasOption("help")) {
        printHelp();
    }

    String infile = null;
    if (cmd.getArgs().length != 0) {
        infile = cmd.getArgs()[0];
    } else {
        printHelp();
    }

    boolean kmedoids = false;
    int kmedoidsK = 3;
    String kmedoidsOutput = "clusters.txt";
    if (cmd.hasOption("kmedoids"))
        kmedoids = true;
    if (cmd.hasOption("kmedoids_k"))
        kmedoidsK = Integer.parseInt(cmd.getOptionValue("kmedoids_k"));
    if (cmd.hasOption("kmedoids_output"))
        kmedoidsOutput = cmd.getOptionValue("kmedoids_output");

    String edgeAttrib = "goodness";
    if (cmd.hasOption("edge_attribute"))
        edgeAttrib = cmd.getOptionValue("edge_attribute");

    // This program should directly use bmgraph-java to read and
    // DefaultGraph should have a constructor that takes a BMGraph as an
    // argument.

    // VisualGraph vg = new VisualGraph(infile, edgeAttrib, false);
    // System.out.println("vg read");
    // SimpleVisualGraph origSG = new SimpleVisualGraph(vg);
    BMGraph bmg = BMGraphUtils.readBMGraph(infile);

    int origN = bmg.getNodes().size();

    // for (int i = 0; i < origN; i++)
    //     System.out.println(i + "=" + origSG.getVisualNode(i));
    System.out.println("bmgraph read");

    BMNode[] i2n = new BMNode[origN];
    HashMap<BMNode, Integer> n2i = new HashMap<BMNode, Integer>();
    {
        int pi = 0;
        for (BMNode nod : bmg.getNodes()) {
            n2i.put(nod, pi);
            i2n[pi++] = nod;
        }
    }

    DefaultGraph dg = new DefaultGraph();
    for (BMEdge e : bmg.getEdges()) {
        dg.addEdge(n2i.get(e.getSource()), n2i.get(e.getTarget()), Double.parseDouble(e.get(edgeAttrib)));
    }

    DefaultGraph origDG = dg.copy();
    System.out.println("inputs read");

    RatioCompression nopCompressor = new RatioCompression.DefaultRatioCompression();
    ResultGraph nopResult = nopCompressor.compressGraph(dg, 1);

    long start = System.currentTimeMillis();
    ResultGraph result = compression.compressGraph(dg, ratio);
    long timeSpent = System.currentTimeMillis() - start;
    double seconds = timeSpent * 0.001;

    BufferedWriter timesWriter = new BufferedWriter(new FileWriter("times.txt", true));
    timesWriter.append("" + seconds + "\n");
    timesWriter.close();

    if (cmd.hasOption("only_times")) {
        System.out.println("Compression done, exiting.");
        System.exit(0);
    }

    BufferedWriter costsWriter = new BufferedWriter(new FileWriter("costs.txt", true));
    costsWriter.append("" + nopResult.getCompressorCosts() + " " + result.getCompressorCosts() + "\n");
    costsWriter.close();

    double[][] origProb;
    double[][] compProb;
    int[] group = new int[origN];

    for (int i = 0; i < result.partition.size(); i++)
        for (int x : result.partition.get(i))
            group[x] = i;

    if (ctype == ConnectivityType.LOCAL) {
        origProb = new double[origN][origN];
        compProb = new double[origN][origN];
        DefaultGraph g = result.uncompressedGraph();
        for (int i = 0; i < origN; i++) {
            for (int j = 0; j < origN; j++) {
                origProb[i][j] = dg.getEdgeWeight(i, j);
                compProb[i][j] = g.getEdgeWeight(i, j);
            }
        }
        System.out.println("Writing edge-dissimilarity");
    } else {
        origProb = ProbDijkstra.getProbMatrix(origDG);
        compProb = new double[origN][origN];

        System.out.println("nodeCount = " + result.graph.getNodeCount());
        double[][] ccProb = ProbDijkstra.getProbMatrix(result.graph);
        System.out.println("ccProb.length = " + ccProb.length);
        System.out.println("ccProb[0].length = " + ccProb[0].length);

        for (int i = 0; i < origN; i++) {
            for (int j = 0; j < origN; j++) {
                if (group[i] == group[j])
                    compProb[i][j] = result.graph.getEdgeWeight(group[i], group[j]);
                else {
                    int gj = group[j];
                    int gi = group[i];
                    compProb[i][j] = ccProb[group[i]][group[j]];
                }
            }
        }

        System.out.println("Writing best-path-dissimilarity");
        // compProb = ProbDijkstra.getProbMatrix(result.uncompressedGraph());
    }

    {
        BufferedWriter connWr = null;
        if (connectivity) {
            connWr = new BufferedWriter(new FileWriter("connectivity.txt", true));
        }
        double totalDiff = 0;

        for (int i = 0; i < origN; i++) {
            for (int j = i + 1; j < origN; j++) {
                double diff = Math.abs(origProb[i][j] - compProb[i][j]);

                // VisualNode ni = origSG.getVisualNode(i);
                // VisualNode nj = origSG.getVisualNode(j);
                BMNode ni = i2n[i];
                BMNode nj = i2n[j];

                if (connectivity)
                    connWr.append(ni + "\t" + nj + "\t" + origProb[i][j] + "\t" + compProb[i][j] + "\t" + diff + "\n");

                totalDiff += diff * diff;
            }
        }

        if (connectivity) {
            connWr.append("\n");
            connWr.close();
        }

        totalDiff = Math.sqrt(totalDiff);

        BufferedWriter dissWr = new BufferedWriter(new FileWriter("dissimilarity.txt", true));
        dissWr.append("" + totalDiff + "\n");
        dissWr.close();
    }

    if (cmd.hasOption("output_bmg")) {
        BMGraph outgraph = new BMGraph();
        String outputfile = cmd.getOptionValue("output_bmg");
        HashMap<Integer, BMNode> nodes = new HashMap<Integer, BMNode>();

        for (int i = 0; i < result.partition.size(); i++) {
            ArrayList<Integer> g = result.partition.get(i);
            if (g.size() == 0)
                continue;

            BMNode node = new BMNode("Supernode_" + i);
            HashMap<String, String> attributes = new HashMap<String, String>();
            StringBuffer contents = new StringBuffer();
            for (int x : g)
                contents.append(i2n[x] + ",");
            contents.delete(contents.length() - 1, contents.length());

            attributes.put("nodes", contents.toString());
            attributes.put("self-edge", "" + result.graph.getEdgeWeight(i, i));
            node.setAttributes(attributes);
            nodes.put(i, node);
            outgraph.ensureHasNode(node);
        }

        for (int i = 0; i < result.partition.size(); i++) {
            if (result.partition.get(i).size() == 0)
                continue;
            for (int x : result.graph.getNeighbors(i)) {
                if (x < i)
                    continue;
                BMNode from = nodes.get(i);
                BMNode to = nodes.get(x);
                if (from == null || to == null) {
                    System.out.println(from + "->" + to);
                    System.out.println(i + "->" + x);
                    System.out.println("");
                }
                BMEdge e = new BMEdge(nodes.get(i), nodes.get(x), "notype");
                e.setAttributes(new HashMap<String, String>());
                e.put("goodness", "" + result.graph.getEdgeWeight(i, x));
                outgraph.ensureHasEdge(e);
            }
        }
        BMGraphUtils.writeBMGraph(outgraph, outputfile);
    }

    // k medoids!
    if (kmedoids) {
        // KMedoidsResult clustersOrig = KMedoids.runKMedoids(origProb, kmedoidsK);

        if (ctype == ConnectivityType.LOCAL) {
            compProb = ProbDijkstra.getProbMatrix(result.uncompressedGraph());
        }

        // KMedoidsResult compClusters = KMedoids.runKMedoids(ProbDijkstra.getProbMatrix(result.graph), kmedoidsK);
        KMedoidsResult clustersComp = KMedoids.runKMedoids(compProb, kmedoidsK);

        BufferedWriter bw = new BufferedWriter(new FileWriter(kmedoidsOutput));

        for (int i = 0; i < origN; i++) {
            int g = group[i];
            // bw.append(origSG.getVisualNode(i).getBMNode() + " " + compClusters.clusters[g] + "\n");
            bw.append(i2n[i] + " " + clustersComp.clusters[i] + "\n");
        }
        bw.close();
    }

    System.exit(0);
}
From source file:net.itransformers.postDiscoverer.core.ReportManager.java
public static void main(String[] args) throws IOException {
    File projectDir = new File(".");
    File scriptPath = new File("postDiscoverer/src/main/resources/postDiscoverer/conf/groovy/");
    ResourceManagerFactory resourceManagerFactory = new XmlResourceManagerFactory(
            "iDiscover/resourceManager/xmlResourceManager/src/main/resources/xmlResourceManager/conf/xml/resource.xml");
    Map<String, String> resourceManagerParams = new HashMap<>();
    resourceManagerParams.put("projectPath", projectDir.getAbsolutePath());
    ResourceManager resourceManager = resourceManagerFactory.createResourceManager("xml", resourceManagerParams);

    Map<String, String> params = new HashMap<String, String>();
    params.put("protocol", "telnet");
    params.put("deviceName", "R1");
    params.put("deviceType", "CISCO");
    params.put("address", "10.17.1.5");
    params.put("port", "23");
    ResourceType resource = resourceManager.findFirstResourceBy(params);
    List connectParameters = resource.getConnectionParams();
    for (int i = 0; i < connectParameters.size(); i++) {
        ConnectionParamsType connParamsType = (ConnectionParamsType) connectParameters.get(i);
        String connectionType = connParamsType.getConnectionType();
        if (connectionType.equalsIgnoreCase(params.get("protocol"))) {
            for (ParamType param : connParamsType.getParam()) {
                params.put(param.getName(), param.getValue());
            }
        }
    }

    File postDiscoveryConfing = new File(
            projectDir + "/postDiscoverer/src/main/resources/postDiscoverer/conf/xml/reportGenerator.xml");
    if (!postDiscoveryConfing.exists()) {
        System.out.println("File missing: " + postDiscoveryConfing.getAbsolutePath());
        return;
    }
    ReportGeneratorType reportGenerator = null;
    FileInputStream is = new FileInputStream(postDiscoveryConfing);
    try {
        reportGenerator = JaxbMarshalar.unmarshal(ReportGeneratorType.class, is);
    } catch (JAXBException e) {
        logger.info(e); // To change body of catch statement use File | Settings | File Templates.
    } finally {
        is.close();
    }

    ReportManager reportManager = new ReportManager(reportGenerator, scriptPath.getPath(), projectDir,
            "postDiscoverer/conf/xslt/table_creator.xslt");
    StringBuffer report = null;
    HashMap<String, Object> groovyExecutorParams = new HashMap<String, Object>();
    for (String s : params.keySet()) {
        groovyExecutorParams.put(s, params.get(s));
    }
    try {
        report = reportManager.reportExecutor(
                new File("/Users/niau/Projects/Projects/netTransformer10/version1/post-discovery"),
                groovyExecutorParams);
    } catch (ParserConfigurationException e) {
        e.printStackTrace();
    } catch (SAXException e) {
        e.printStackTrace();
    }
    if (report != null) {
        System.out.println(report.toString());
    } else {
        System.out.println("Report generation failed!");
    }
}
From source file:net.java.sen.tools.MkCompoundTable.java
/**
 * Build compound word table.
 */
public static void main(String args[]) {
    ResourceBundle rb = ResourceBundle.getBundle("dictionary");
    int pos_start = Integer.parseInt(rb.getString("pos_start"));
    int pos_size = Integer.parseInt(rb.getString("pos_size"));
    try {
        log.info("reading compound word information ... ");
        HashMap compoundTable = new HashMap();

        log.info("load dic: " + rb.getString("compound_word_file"));
        BufferedReader dicStream = new BufferedReader(new InputStreamReader(
                new FileInputStream(rb.getString("compound_word_file")), rb.getString("dic.charset")));

        String t;
        int line = 0;

        StringBuffer pos_b = new StringBuffer();
        while ((t = dicStream.readLine()) != null) {
            CSVParser parser = new CSVParser(t);
            String csv[] = parser.nextTokens();
            if (csv.length < (pos_size + pos_start)) {
                throw new RuntimeException("format error:" + line);
            }

            pos_b.setLength(0);
            for (int i = pos_start; i < (pos_start + pos_size - 1); i++) {
                pos_b.append(csv[i]);
                pos_b.append(',');
            }

            pos_b.append(csv[pos_start + pos_size - 1]);
            pos_b.append(',');

            for (int i = pos_start + pos_size; i < (csv.length - 2); i++) {
                pos_b.append(csv[i]);
                pos_b.append(',');
            }
            pos_b.append(csv[csv.length - 2]);

            compoundTable.put(pos_b.toString(), csv[csv.length - 1]);
        }
        dicStream.close();
        log.info("done.");

        log.info("writing compound word table ... ");
        ObjectOutputStream os = new ObjectOutputStream(
                new FileOutputStream(rb.getString("compound_word_table")));
        os.writeObject(compoundTable);
        os.close();
        log.info("done.");
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }
}
From source file:fr.inria.edelweiss.kgdqp.core.FedQueryingCLI.java
@SuppressWarnings("unchecked")
public static void main(String args[]) throws ParseException, EngineException {
    List<String> endpoints = new ArrayList<String>();
    String queryPath = null;
    int slice = -1;

    Options options = new Options();
    Option helpOpt = new Option("h", "help", false, "print this message");
    Option queryOpt = new Option("q", "query", true, "specify the sparql query file");
    Option endpointOpt = new Option("e", "endpoints", true, "the list of federated sparql endpoint URLs");
    Option groupingOpt = new Option("g", "grouping", true, "triple pattern optimisation");
    Option slicingOpt = new Option("s", "slicing", true, "size of the slicing parameter");
    Option versionOpt = new Option("v", "version", false, "print the version information and exit");
    options.addOption(queryOpt);
    options.addOption(endpointOpt);
    options.addOption(helpOpt);
    options.addOption(versionOpt);
    options.addOption(groupingOpt);
    options.addOption(slicingOpt);

    String header = "Corese/KGRAM DQP command line interface";
    String footer = "\nPlease report any issue to alban.gaignard@cnrs.fr";

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);

    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("kgdqp", header, options, footer, true);
        System.exit(0);
    }
    if (!cmd.hasOption("e")) {
        logger.info("You must specify at least the URL of one sparql endpoint !");
        System.exit(0);
    } else {
        endpoints = new ArrayList<String>(Arrays.asList(cmd.getOptionValues("e")));
    }
    if (!cmd.hasOption("q")) {
        logger.info("You must specify a path for a sparql query !");
        System.exit(0);
    } else {
        queryPath = cmd.getOptionValue("q");
    }
    if (cmd.hasOption("s")) {
        try {
            slice = Integer.parseInt(cmd.getOptionValue("s"));
        } catch (NumberFormatException ex) {
            logger.warn(cmd.getOptionValue("s") + " is not formatted as number for the slicing parameter");
            logger.warn("Slicing disabled");
        }
    }
    if (cmd.hasOption("v")) {
        logger.info("version 3.0.4-SNAPSHOT");
        System.exit(0);
    }

    /////////////////
    Graph graph = Graph.create();
    QueryProcessDQP exec = QueryProcessDQP.create(graph);
    exec.setGroupingEnabled(cmd.hasOption("g"));
    if (slice > 0) {
        exec.setSlice(slice);
    }
    Provider sProv = ProviderImplCostMonitoring.create();
    exec.set(sProv);

    for (String url : endpoints) {
        try {
            exec.addRemote(new URL(url), WSImplem.REST);
        } catch (MalformedURLException ex) {
            logger.error(url + " is not a well-formed URL");
            System.exit(1);
        }
    }

    StringBuffer fileData = new StringBuffer(1000);
    BufferedReader reader = null;
    try {
        reader = new BufferedReader(new FileReader(queryPath));
    } catch (FileNotFoundException ex) {
        logger.error("Query file " + queryPath + " not found !");
        System.exit(1);
    }
    char[] buf = new char[1024];
    int numRead = 0;
    try {
        while ((numRead = reader.read(buf)) != -1) {
            String readData = String.valueOf(buf, 0, numRead);
            fileData.append(readData);
            buf = new char[1024];
        }
        reader.close();
    } catch (IOException ex) {
        logger.error("Error while reading query file " + queryPath);
        System.exit(1);
    }
    String sparqlQuery = fileData.toString();

    // Query q = exec.compile(sparqlQuery, null);
    // System.out.println(q);

    StopWatch sw = new StopWatch();
    sw.start();
    Mappings map = exec.query(sparqlQuery);
    int dqpSize = map.size();
    System.out.println("--------");
    long time = sw.getTime();
    System.out.println(time + " " + dqpSize);
}
From source file:comparetopics.CompareTopics.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    try {
        File file1 = new File(
                "/Users/apple/Desktop/graduation-project/topic-modeling/output/jhotdraw-extracted-code/keys.txt");
        File file2 = new File(
                "/Users/apple/Desktop/graduation-project/topic-modeling/output/jhotdraw-extracted-code/keys.txt");
        CompareTopics compareTopics = new CompareTopics();
        String[] words1 = compareTopics.getWords(file1);
        String[] words2 = compareTopics.getWords(file2);
        StringBuffer words = new StringBuffer();
        File outputFile = new File("/Users/apple/Desktop/test.txt");
        if (outputFile.createNewFile()) {
            System.out.println("Create successful: " + outputFile.getName());
        }
        boolean hasSame = false;
        for (String w1 : words1) {
            if (!NumberUtils.isNumber(w1)) {
                for (String w2 : words2) {
                    if (w1.equals(w2)) {
                        words.append(w1);
                        // words.append("\r\n");
                        words.append(" ");
                        hasSame = true;
                        break;
                    }
                }
            }
        }
        if (!hasSame) {
            System.out.println("No same word.");
        } else {
            compareTopics.printToFile(words.toString(), outputFile);
        }
    } catch (IOException ex) {
        Logger.getLogger(CompareTopics.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:com.archivas.clienttools.arcmover.cli.ManagedCLIJob.java
@SuppressWarnings({ "UseOfSystemOutOrSystemErr" })
public static void main(String args[]) {
    if (LOG.isLoggable(Level.FINE)) {
        StringBuffer sb = new StringBuffer();
        sb.append("Program Arguments").append(NEWLINE);
        for (int i = 0; i < args.length; i++) {
            sb.append(" ").append(i).append(": ").append(args[i]);
            sb.append(NEWLINE);
        }
        LOG.log(Level.FINE, sb.toString());
    }
    ConfigurationHelper.validateLaunchOK();

    ManagedCLIJob arcCmd = null;
    try {
        if (args[0].equals("copy")) {
            arcCmd = new ArcCopy(args, 2);
        } else if (args[0].equals("delete")) {
            arcCmd = new ArcDelete(args, 2);
        } else if (args[0].equals("metadata")) {
            arcCmd = new ArcMetadata(args, 2);
        } else {
            throw new RuntimeException("Unsupported operation: " + args[0]);
        }
        arcCmd.parseArgs();
        if (arcCmd.shouldPrintHelp()) {
            System.out.println(arcCmd.helpScreen());
        } else {
            arcCmd.execute(new PrintWriter(System.out), new PrintWriter(System.err));
        }
    } catch (ParseException e) {
        System.out.println("Error: " + e.getMessage());
        System.out.println();
        System.out.println(arcCmd.helpScreen());
        arcCmd.setExitCode(EXIT_CODE_OPTION_PARSE_ERROR);
    } catch (Exception e) {
        LOG.log(Level.SEVERE, e.getMessage(), e);
        System.out.println();
        System.err.println("Job failed. " + e.getMessage());
        if (arcCmd != null) {
            arcCmd.setExitCode(EXIT_CODE_DM_ERROR);
        }
    } finally {
        if (arcCmd != null) {
            arcCmd.exit();
        }
    }
}