List of usage examples for java.io LineNumberReader close
public void close() throws IOException
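Note: LineNumberReader implements Closeable, so close() is usually best left to a try-with-resources block, which closes the reader even if reading fails. A minimal sketch (the class name and the file name example.txt are placeholders, not part of the examples below):

import java.io.FileReader;
import java.io.IOException;
import java.io.LineNumberReader;

public class LineNumberReaderCloseExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources calls close() automatically when the block exits
        try (LineNumberReader reader = new LineNumberReader(new FileReader("example.txt"))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(reader.getLineNumber() + ": " + line);
            }
        } // reader.close() has been invoked here, even if readLine() threw
    }
}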
From source file:CSV_ReportingConsolidator.java
public static void main(String[] args) throws IOException {
    // Construct an array containing the list of files in the input folder
    String inputPath = "input/"; // Set the directory containing the CSV files
    String outputPath = "output/"; // Set the output directory for the consolidated report
    String outputFile = "Consolidated_CSV_Report.csv";
    File folder = new File(inputPath); // Load the selected path
    File[] listOfFiles = folder.listFiles(); // Retrieve the list of files from the directory

    // Serialize the reference headers to write the output CSV header
    CSVReader referenceReader = new CSVReader(new FileReader("reference/example_fields.csv"));
    String[] referenceHeaders = referenceReader.readNext();
    CSVWriter writer = new CSVWriter(new FileWriter(outputPath + outputFile), ',', CSVWriter.NO_QUOTE_CHARACTER);

    System.out.println("-- CSV parser initiated, found " + listOfFiles.length + " input files.\n");

    for (int i = 0; i < listOfFiles.length; i++) {
        if (listOfFiles[i].isFile()) {
            String filename = listOfFiles[i].getName(); // Retrieve the file name

            if (!filename.endsWith("csv")) { // Check if the file has a CSV extension
                System.out.println("EE | Fatal error: The input path contains non-csv files: " + filename
                        + ".\n Please remove them and try again.");
                writer.close();
                System.exit(1); // Exit if non-CSV files are found
            }

            String filePath = String.valueOf(inputPath + filename); // Combine the path with the filename
            File file = new File(filePath);
            CSVReader csvFile = new CSVReader(new FileReader(filePath));
            String[] nextLine; // CSV line data container
            int rowIterator = 0; // Used to loop between rows
            int colIterator = 0; // Used to loop between columns
            int rowCount = 0; // Used to count the total number of rows
            int pageCount = 0;
            int f = 0;
            String[] pageName = new String[100]; // Holder for Page names
            double[] individualPRT = new double[100]; // Holder for Page Response Times
            String PTrun = ""; // Name of Performance Test Run
            String startTime = ""; // Test start time
            double PRT = 0; // Average Page Response Time
            double PRd = 0; // Page Response Time Standard Deviation
            double ERT = 0; // Average Element Response Time
            double ERd = 0; // Element Response Time Standard Deviation
            double MRT = 0; // Maximum Page Response Time
            double mRT = 0; // Minimum Page Response Time
            int elapsedTime = 0; // Test Elapsed Time
            int completedUsers = 0; // Number of Completed Users
            int TPA = 0; // Total Page Attempts
            int TPH = 0; // Total Page Hits
            int TEA = 0; // Total Element Attempts
            int TEH = 0; // Total Element Hits

            // Fetch the total row count:
            FileReader fr = new FileReader(file);
            LineNumberReader ln = new LineNumberReader(fr);
            while (ln.readLine() != null) {
                rowCount++;
            }
            ln.close(); // Close the file reader

            // Fetch test identification data:
            nextLine = csvFile.readNext();
            PTrun = nextLine[1]; // Name of Performance Test Run
            nextLine = csvFile.readNext();
            startTime = nextLine[1]; // Performance Test Start Time

            // Skip 9 uninteresting rows:
            while (rowIterator < 9) {
                nextLine = csvFile.readNext();
                rowIterator++;
            }

            // Check if there are VP fails (adds another column)
            if (nextLine[9].equals("Total Page VPs Error For Run")) {
                f = 2;
            } else if (nextLine[8].equals("Total Page VPs Failed For Run")
                    || nextLine[8].equals("Total Page VPs Error For Run")) {
                f = 1;
            } else {
                f = 0;
            }

            // Read the page titles:
            while (colIterator != -1) {
                pageName[colIterator] = nextLine[colIterator + 16 + f];
                if ((pageName[colIterator].equals(pageName[0])) && colIterator > 0) {
                    pageCount = colIterator;
                    pageName[colIterator] = null;
                    colIterator = -1; // Detects when the page titles start to repeat
                } else {
                    colIterator++;
                }
            }

            // Retrieve non-continuous performance data, auto-detect gaps, auto-convert in seconds where needed
            nextLine = csvFile.readNext();
            nextLine = csvFile.readNext();

            while (rowIterator < rowCount - 3) {
                if (nextLine.length > 1) {
                    if (nextLine[0].length() != 0) {
                        elapsedTime = Integer.parseInt(nextLine[0]) / 1000;
                    }
                }
                if (nextLine.length > 4) {
                    if (nextLine[4].length() != 0) {
                        completedUsers = Integer.parseInt(nextLine[4]);
                    }
                }
                if (nextLine.length > 6 + f) {
                    if (nextLine[6 + f].length() != 0) {
                        TPA = (int) Double.parseDouble(nextLine[6 + f]);
                    }
                }
                if (nextLine.length > 7 + f) {
                    if (nextLine[7 + f].length() != 0) {
                        TPH = (int) Double.parseDouble(nextLine[7 + f]);
                    }
                }
                if (nextLine.length > 12 + f) {
                    if (nextLine[12 + f].length() != 0) {
                        TEA = (int) Double.parseDouble(nextLine[12 + f]);
                    }
                }
                if (nextLine.length > 13 + f) {
                    if (nextLine[13 + f].length() != 0) {
                        TEH = (int) Double.parseDouble(nextLine[13 + f]);
                    }
                }
                if (nextLine.length > 8 + f) {
                    if (nextLine[8 + f].length() != 0) {
                        PRT = Double.parseDouble(nextLine[8 + f]) / 1000;
                    }
                }
                if (nextLine.length > 9 + f) {
                    if (nextLine[9 + f].length() != 0) {
                        PRd = Double.parseDouble(nextLine[9 + f]) / 1000;
                    }
                }
                if (nextLine.length > 14 + f) {
                    if (nextLine[14 + f].length() != 0) {
                        ERT = Double.parseDouble(nextLine[14 + f]) / 1000;
                    }
                }
                if (nextLine.length > 15 + f) {
                    if (nextLine[15 + f].length() != 0) {
                        ERd = Double.parseDouble(nextLine[15 + f]) / 1000;
                    }
                }
                if (nextLine.length > 10 + f) {
                    if (nextLine[10 + f].length() != 0) {
                        MRT = Double.parseDouble(nextLine[10 + f]) / 1000;
                    }
                }
                if (nextLine.length > 11 + f) {
                    if (nextLine[11 + f].length() != 0) {
                        mRT = Double.parseDouble(nextLine[11 + f]) / 1000;
                    }
                }
                nextLine = csvFile.readNext();
                rowIterator++;
            }

            // Convert the elapsed time from seconds to HH:MM:SS format
            int hours = elapsedTime / 3600, remainder = elapsedTime % 3600, minutes = remainder / 60,
                    seconds = remainder % 60;
            String eTime = (hours < 10 ? "0" : "") + hours + ":" + (minutes < 10 ? "0" : "") + minutes + ":"
                    + (seconds < 10 ? "0" : "") + seconds;

            csvFile.close(); // File recycled to reset the line parser
            CSVReader csvFile2 = new CSVReader(new FileReader(filePath));

            // Reset iterators to allow re-usage:
            rowIterator = 0;
            colIterator = 0;

            // Skip first 13 rows:
            while (rowIterator < 13) {
                nextLine = csvFile2.readNext();
                rowIterator++;
            }

            // Dynamically retrieve individual page response times in seconds, correlate with page names:
            while (rowIterator < rowCount) {
                while (colIterator < pageCount) {
                    if (nextLine.length > 16 + f) {
                        if (nextLine[colIterator + 16 + f].length() != 0) {
                            individualPRT[colIterator] = Double.parseDouble(nextLine[colIterator + 16 + f]) / 1000;
                        }
                    }
                    colIterator++;
                }
                nextLine = csvFile2.readNext();
                rowIterator++;
                colIterator = 0;
            }

            csvFile2.close(); // Final file closing

            // Reset iterators to allow re-usage:
            rowIterator = 0;
            colIterator = 0;

            // Display statistics in console, enable only for debugging purposes:
            /*
            System.out.println(" Elapsed Time: " + elapsedTime + "\n Completed Users: " + completedUsers
                    + "\n Total Page Attempts: " + TPA + "\n Total Page Hits: " + TPH
                    + "\n Average Response Time For All Pages For Run: " + PRT
                    + "\n Response Time Standard Deviation For All Pages For Run: " + PRd
                    + "\n Maximum Response Time For All Pages For Run: " + MRT
                    + "\n Minimum Response Time For All Pages For Run: " + mRT
                    + "\n Total Page Element Attempts: " + TEA + "\n Total Page Element Hits: " + TEH
                    + "\n Average Response Time For All Page Elements For Run: " + ERT
                    + "\n Response Time Standard Deviation For All Page Elements For Run: " + ERd + "\n");

            // Display individual page response times in console:
            while (colIterator < 9) {
                System.out.println("Page " + Page[colIterator] + " - Response Time: " + PagePRT[colIterator]);
                colIterator++;
            }
            */

            // Serialize individual Page Response Times into CSV values
            StringBuffer individualPRTList = new StringBuffer();
            if (individualPRT.length > 0) {
                individualPRTList.append(String.valueOf(individualPRT[0]));
                for (int k = 1; k < pageCount; k++) {
                    individualPRTList.append(",");
                    individualPRTList.append(String.valueOf(individualPRT[k]));
                }
            }

            // Serialize all retrieved performance parameters:
            String[] entries = { PTrun, startTime, String.valueOf(completedUsers), eTime, String.valueOf(TPA),
                    String.valueOf(TPH), String.valueOf(PRT), String.valueOf(PRd), String.valueOf(MRT),
                    String.valueOf(mRT), String.valueOf(TEA), String.valueOf(TEH), String.valueOf(ERT),
                    String.valueOf(ERd), "", individualPRTList.toString(), };

            // Define header and write it to the first CSV row
            Object[] headerConcatenator = ArrayUtils.addAll(referenceHeaders, pageName);
            String[] header = new String[referenceHeaders.length + pageCount];
            header = Arrays.copyOf(headerConcatenator, header.length, String[].class);

            if (i == 0) {
                writer.writeNext(header); // Write CSV header
            }
            writer.writeNext(entries); // Write performance parameters in CSV format
            System.out.println("== Processed: " + filename + " ===========================");
        }
    }

    writer.close(); // Close the CSV writer
    System.out.println("\n-- Done processing " + listOfFiles.length + " files."
            + "\n-- The consolidated report has been saved to " + outputPath + outputFile);
}
From source file:Main.java
public static void main(String[] args) throws IOException {
    FileReader fr = new FileReader("C:/test.txt");
    LineNumberReader lnr = new LineNumberReader(fr);

    // reads and prints data from the reader
    System.out.println((char) lnr.read());
    System.out.println((char) lnr.read());

    // mark invoked at this position
    lnr.mark(0);
    System.out.println("mark() invoked");

    System.out.println((char) lnr.read());
    System.out.println((char) lnr.read());

    // if this reader supports mark() and reset()
    if (lnr.markSupported()) {
        // reset() repositions the stream to the mark
        lnr.reset();
        System.out.println("reset() invoked");

        System.out.println((char) lnr.read());
        System.out.println((char) lnr.read());
    }

    lnr.close();
}
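Note: the example above passes 0 as the read-ahead limit to mark(); reset() happens to succeed here because the reads stay inside the reader's internal buffer, but the API only guarantees a successful reset() when the limit covers the number of characters read after mark(). A minimal sketch with an explicit limit, and with close() handled by try-with-resources (the file path is the same placeholder as above):

try (LineNumberReader lnr = new LineNumberReader(new FileReader("C:/test.txt"))) {
    System.out.println((char) lnr.read());
    lnr.mark(16); // allow up to 16 characters to be read before reset()
    System.out.println((char) lnr.read());
    System.out.println((char) lnr.read());
    lnr.reset(); // back to the marked position
    System.out.println((char) lnr.read());
} // close() is invoked automatically here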
From source file:org.mzd.shap.sql.FunctionalTable.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    if (args.length != 2) {
        throw new Exception("Usage: [func_cat.txt] [cog-to-cat.txt]");
    }

    BasicDataSource ds = new BasicDataSource();
    ds.setDriverClassName("com.mysql.jdbc.Driver");
    ds.setUrl("jdbc:mysql://localhost/Dummy");
    ds.setUsername("test");
    ds.setPassword("xeno12");

    Connection conn = ds.getConnection();

    // Get the Sequence ID list.
    PreparedStatement insert = null;
    LineNumberReader reader = null;
    try {
        reader = new LineNumberReader(new FileReader(args[0]));
        insert = conn.prepareStatement("INSERT INTO CogCategories (code,function,class) values (?,?,?)");
        while (true) {
            String line = reader.readLine();
            if (line == null) {
                break;
            }
            String[] fields = line.split("\t");
            if (fields.length != 3) {
                throw new Exception("Bad number of fields [" + fields.length + "] for line [" + line + "]");
            }
            insert.setString(1, fields[0]);
            insert.setString(2, fields[1]);
            insert.setString(3, fields[2]);
            insert.executeUpdate();
        }
        insert.close();
        reader.close();

        reader = new LineNumberReader(new FileReader(args[1]));
        insert = conn.prepareStatement("INSERT INTO CogFunctions (accession,code) values (?,?)");
        while (true) {
            String line = reader.readLine();
            if (line == null) {
                break;
            }
            String[] fields = line.split("\\s+");
            if (fields.length != 2) {
                throw new Exception("Bad number of fields [" + fields.length + "] for line [" + line + "]");
            }
            for (char code : fields[1].toCharArray()) {
                insert.setString(1, fields[0]);
                insert.setString(2, String.valueOf(code));
                insert.executeUpdate();
            }
        }
        insert.close();
    } finally {
        if (reader != null) {
            reader.close();
        }
        conn.close();
        ds.close();
    }
}
From source file:com.github.fritaly.graphml4j.samples.GradleDependencies.java
public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println(String.format("%s <output-file>", GradleDependencies.class.getSimpleName()));
        System.exit(1);
    }

    final File file = new File(args[0]);

    System.out.println("Writing GraphML file to " + file.getAbsolutePath() + " ...");

    FileWriter fileWriter = null;
    GraphMLWriter graphWriter = null;
    Reader reader = null;
    LineNumberReader lineReader = null;

    try {
        fileWriter = new FileWriter(file);
        graphWriter = new GraphMLWriter(fileWriter);

        // Customize the rendering of nodes
        final NodeStyle nodeStyle = graphWriter.getNodeStyle();
        nodeStyle.setWidth(250.0f);

        graphWriter.setNodeStyle(nodeStyle);

        // The dependency graph has been generated by Gradle with the
        // command "gradle dependencies". The output of this command has
        // been saved to a text file which will be parsed to rebuild the
        // dependency graph
        reader = new InputStreamReader(GradleDependencies.class.getResourceAsStream("gradle-dependencies.txt"));
        lineReader = new LineNumberReader(reader);

        String line = null;

        // Stack containing the node identifiers per depth inside the
        // dependency graph (the topmost dependency is the first one in the
        // stack)
        final Stack<String> parentIds = new Stack<String>();

        // Open the graph
        graphWriter.graph();

        // Map storing the node identifiers per label
        final Map<String, String> nodeIdsByLabel = new TreeMap<String, String>();

        while ((line = lineReader.readLine()) != null) {
            // Determine the depth of the current dependency inside the
            // graph. The depth can be inferred from the indentation used by
            // Gradle. Each level of depth adds 5 more characters of
            // indentation
            final int initialLength = line.length();

            // Remove the 5-character strings used by Gradle to indent dependencies
            line = StringUtils.replace(line, "+--- ", "");
            line = StringUtils.replace(line, "|    ", "");
            line = StringUtils.replace(line, "\\--- ", "");
            line = StringUtils.replace(line, "     ", "");

            // The depth can easily be inferred now
            final int depth = (initialLength - line.length()) / 5;

            // Remove unnecessary node ids
            while (depth <= parentIds.size()) {
                parentIds.pop();
            }

            // Compute a nice label from the dependency (group, artifact,
            // version) tuple
            final String label = computeLabel(line);

            // Has this dependency already been added to the graph ?
            if (!nodeIdsByLabel.containsKey(label)) {
                // No, add the node
                nodeIdsByLabel.put(label, graphWriter.node(label));
            }

            final String nodeId = nodeIdsByLabel.get(label);

            parentIds.push(nodeId);

            if (parentIds.size() > 1) {
                // Generate an edge between the current node and its parent
                graphWriter.edge(parentIds.get(parentIds.size() - 2), nodeId);
            }
        }

        // Close the graph
        graphWriter.closeGraph();

        System.out.println("Done");
    } finally {
        // Calling GraphMLWriter.close() is necessary to dispose the underlying resources
        graphWriter.close();
        fileWriter.close();
        lineReader.close();
        reader.close();
    }
}
From source file:com.github.fritaly.graphml4j.samples.GradleDependenciesWithGroups.java
public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println(String.format("%s <output-file>", GradleDependenciesWithGroups.class.getSimpleName()));
        System.exit(1);
    }

    final File file = new File(args[0]);

    System.out.println("Writing GraphML file to " + file.getAbsolutePath() + " ...");

    FileWriter fileWriter = null;
    GraphMLWriter graphWriter = null;
    Reader reader = null;
    LineNumberReader lineReader = null;

    try {
        fileWriter = new FileWriter(file);
        graphWriter = new GraphMLWriter(fileWriter);

        // Customize the rendering of nodes
        final NodeStyle nodeStyle = graphWriter.getNodeStyle();
        nodeStyle.setWidth(250.0f);
        nodeStyle.setHeight(50.0f);

        graphWriter.setNodeStyle(nodeStyle);

        // The dependency graph has been generated by Gradle with the
        // command "gradle dependencies". The output of this command has
        // been saved to a text file which will be parsed to rebuild the
        // dependency graph
        reader = new InputStreamReader(
                GradleDependenciesWithGroups.class.getResourceAsStream("gradle-dependencies.txt"));
        lineReader = new LineNumberReader(reader);

        String line = null;

        // Stack containing the artifacts per depth inside the dependency
        // graph (the topmost dependency is the first one in the stack)
        final Stack<Artifact> stack = new Stack<Artifact>();

        final Map<String, Set<Artifact>> artifactsByGroup = new HashMap<String, Set<Artifact>>();

        // List of parent/child relationships between artifacts
        final List<Relationship> relationships = new ArrayList<Relationship>();

        while ((line = lineReader.readLine()) != null) {
            // Determine the depth of the current dependency inside the
            // graph. The depth can be inferred from the indentation used by
            // Gradle. Each level of depth adds 5 more characters of
            // indentation
            final int initialLength = line.length();

            // Remove the 5-character strings used by Gradle to indent dependencies
            line = StringUtils.replace(line, "+--- ", "");
            line = StringUtils.replace(line, "|    ", "");
            line = StringUtils.replace(line, "\\--- ", "");
            line = StringUtils.replace(line, "     ", "");

            // The depth can easily be inferred now
            final int depth = (initialLength - line.length()) / 5;

            // Remove unnecessary artifacts
            while (depth <= stack.size()) {
                stack.pop();
            }

            // Create an artifact from the dependency (group, artifact,
            // version) tuple
            final Artifact artifact = createArtifact(line);

            stack.push(artifact);

            if (stack.size() > 1) {
                // Store the artifact and its parent
                relationships.add(new Relationship(stack.get(stack.size() - 2), artifact));
            }

            if (!artifactsByGroup.containsKey(artifact.group)) {
                artifactsByGroup.put(artifact.group, new HashSet<Artifact>());
            }

            artifactsByGroup.get(artifact.group).add(artifact);
        }

        // Open the graph
        graphWriter.graph();

        final Map<Artifact, String> nodeIdsByArtifact = new HashMap<Artifact, String>();

        // Loop over the groups and generate the associated nodes
        for (String group : artifactsByGroup.keySet()) {
            graphWriter.group(group, true);

            for (Artifact artifact : artifactsByGroup.get(group)) {
                final String nodeId = graphWriter.node(artifact.getLabel());

                nodeIdsByArtifact.put(artifact, nodeId);
            }

            graphWriter.closeGroup();
        }

        // Generate the edges
        for (Relationship relationship : relationships) {
            final String parentId = nodeIdsByArtifact.get(relationship.parent);
            final String childId = nodeIdsByArtifact.get(relationship.child);

            graphWriter.edge(parentId, childId);
        }

        // Close the graph
        graphWriter.closeGraph();

        System.out.println("Done");
    } finally {
        // Calling GraphMLWriter.close() is necessary to dispose the underlying resources
        graphWriter.close();
        fileWriter.close();
        lineReader.close();
        reader.close();
    }
}
From source file:com.github.fritaly.graphml4j.samples.GradleDependenciesWithGroupsAndBuffering.java
public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println(String.format("%s <output-file>",
                GradleDependenciesWithGroupsAndBuffering.class.getSimpleName()));
        System.exit(1);
    }

    final File file = new File(args[0]);

    System.out.println("Writing GraphML file to " + file.getAbsolutePath() + " ...");

    FileWriter fileWriter = null;
    Reader reader = null;
    LineNumberReader lineReader = null;

    try {
        fileWriter = new FileWriter(file);

        final com.github.fritaly.graphml4j.datastructure.Graph graph = new Graph();

        // The dependency graph has been generated by Gradle with the
        // command "gradle dependencies". The output of this command has
        // been saved to a text file which will be parsed to rebuild the
        // dependency graph
        reader = new InputStreamReader(
                GradleDependenciesWithGroupsAndBuffering.class.getResourceAsStream("gradle-dependencies.txt"));
        lineReader = new LineNumberReader(reader);

        String line = null;

        // Stack containing the nodes per depth inside the dependency graph
        // (the topmost dependency is the first one in the stack)
        final Stack<Node> parentNodes = new Stack<Node>();

        while ((line = lineReader.readLine()) != null) {
            // Determine the depth of the current dependency inside the
            // graph. The depth can be inferred from the indentation used by
            // Gradle. Each level of depth adds 5 more characters of
            // indentation
            final int initialLength = line.length();

            // Remove the 5-character strings used by Gradle to indent dependencies
            line = StringUtils.replace(line, "+--- ", "");
            line = StringUtils.replace(line, "|    ", "");
            line = StringUtils.replace(line, "\\--- ", "");
            line = StringUtils.replace(line, "     ", "");

            // The depth can easily be inferred now
            final int depth = (initialLength - line.length()) / 5;

            // Remove unnecessary node ids
            while (depth <= parentNodes.size()) {
                parentNodes.pop();
            }

            final Artifact artifact = createArtifact(line);

            Node node = graph.getNodeByData(artifact);

            // Has this dependency already been added to the graph ?
            if (node == null) {
                // No, add the node
                node = graph.addNode(artifact);
            }

            parentNodes.push(node);

            if (parentNodes.size() > 1) {
                // Generate an edge between the current node and its parent
                graph.addEdge("Depends on", parentNodes.get(parentNodes.size() - 2), node);
            }
        }

        // Create the groups after creating the nodes & edges
        for (Node node : graph.getNodes()) {
            final Artifact artifact = (Artifact) node.getData();
            final String groupId = artifact.group;

            Node groupNode = graph.getNodeByData(groupId);

            if (groupNode == null) {
                groupNode = graph.addNode(groupId);
            }

            // add the node to the group
            node.setParent(groupNode);
        }

        graph.toGraphML(fileWriter, new Renderer() {
            @Override
            public String getNodeLabel(Node node) {
                return node.isGroup() ? node.getData().toString() : ((Artifact) node.getData()).getLabel();
            }

            @Override
            public boolean isGroupOpen(Node node) {
                return true;
            }

            @Override
            public NodeStyle getNodeStyle(Node node) {
                // Customize the rendering of nodes
                final NodeStyle nodeStyle = new NodeStyle();
                nodeStyle.setWidth(250.0f);

                return nodeStyle;
            }

            @Override
            public GroupStyles getGroupStyles(Node node) {
                return new GroupStyles();
            }

            @Override
            public EdgeStyle getEdgeStyle(Edge edge) {
                return new EdgeStyle();
            }
        });

        System.out.println("Done");
    } finally {
        // Calling GraphMLWriter.close() is necessary to dispose the underlying resources
        fileWriter.close();
        lineReader.close();
        reader.close();
    }
}
From source file:Main.java
static long getLineCountForFile(File file) throws IOException {
    LineNumberReader in = new LineNumberReader(new FileReader(file));
    long numLines = 0;

    // Read to the end of the file so the reader's line number reflects the total count
    while (in.readLine() != null)
        ;

    numLines = in.getLineNumber();
    in.close();

    return numLines;
}
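Note: the helper above only reaches close() when reading succeeds; if readLine() throws, the reader is never closed. A sketch of the same line count written with try-with-resources (the method name getLineCountForFileSafe is made up for illustration):

static long getLineCountForFileSafe(File file) throws IOException {
    // try-with-resources closes the reader even if readLine() throws
    try (LineNumberReader in = new LineNumberReader(new FileReader(file))) {
        while (in.readLine() != null) {
            // consume the stream; getLineNumber() tracks lines as they are read
        }
        return in.getLineNumber();
    }
}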
From source file:Main.java
/**
 * Loads an index (Hashtable) from a file.
 *
 * @param root_
 *            The directory in which the index file is stored.
 * @return The index table.
 * @exception IOException
 *                If an internal error prevents the file from being read.
 */
public static Hashtable loadIndex(File root_, String name) throws IOException {
    File indexfile = new File(root_, name);
    Hashtable index = new Hashtable();

    if (indexfile.exists()) {
        LineNumberReader in = new LineNumberReader(new FileReader(indexfile));
        // Each entry occupies two consecutive lines: the key line is read first, then its Long value
        while (in.ready()) {
            index.put(in.readLine(), new Long(in.readLine()));
        }
        in.close();
    }
    return index;
}
From source file:Main.java
public static String getMac() {
    String macAdress = null;
    String str = "";
    try {
        // Read the MAC address of the wlan0 interface from sysfs
        Process pp = Runtime.getRuntime().exec("cat /sys/class/net/wlan0/address ");
        InputStreamReader ir = new InputStreamReader(pp.getInputStream());
        LineNumberReader input = new LineNumberReader(ir);

        // Loop until a non-null line is read; readLine() returns null at end of stream
        for (; null != str;) {
            str = input.readLine();
            if (str != null) {
                macAdress = str.trim();
                break;
            }
        }
        ir.close();
        input.close();
    } catch (IOException ex) {
        ex.printStackTrace();
    }
    return macAdress;
}