Example usage for java.io FileReader close

List of usage examples for java.io FileReader close

Introduction

On this page you can find usage examples for java.io.FileReader.close().

Prototype

public void close() throws IOException 
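
FileReader inherits close() from InputStreamReader; calling it releases the underlying file handle. Since Java 7 the preferred way to guarantee the call is a try-with-resources statement, which closes the reader even when reading throws. A minimal sketch (the file name is a placeholder):

import java.io.FileReader;
import java.io.IOException;

public class CloseExample {
    public static void main(String[] args) {
        // the reader is closed automatically when the try block exits
        try (FileReader reader = new FileReader("example.txt")) {
            int ch;
            while ((ch = reader.read()) >= 0) {
                System.out.print((char) ch);
            }
        } catch (IOException e) {
            e.printStackTrace(); // close() has already run by this point
        }
    }
}

The examples below close the reader explicitly instead, either directly or in a finally block.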

Usage

From source file:fridgegameinstaller.installation.java

public String getLocalModpackVersion() {
    System.out.println(mcloc + "/fridgegame_version.fg");
    File fgver = new File(mcloc + "/fridgegame_version.fg");
    String ver = null;
    if (fgver.exists()) {

        try {
            FileReader reader = new FileReader(fgver);
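            // note: if read() throws, the close() call below is skipped and the reader leaks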
            char[] chars = new char[(int) fgver.length()];
            reader.read(chars);
            ver = new String(chars);
            reader.close();

        } catch (IOException e) {
            e.printStackTrace();
            mainFrame.errorMsg("An error occurred while reading the modpack version. More information in the log",
                    "Error");
            mainFrame.setFormToPostInstallation();

        }

    } else {

        ver = "0";
    }
    System.out.println(ver);
    return ver;
}

From source file:org.esa.cci.sst.tools.BasicTool.java

private void addConfigurationProperties(File configurationFile) {
    FileReader reader = null;
    try {
        reader = new FileReader(configurationFile);
        config.load(reader);
    } catch (FileNotFoundException e) {
        throw new ToolException(MessageFormat.format("File not found: {0}", configurationFile), e,
                ToolException.CONFIGURATION_FILE_NOT_FOUND_ERROR);
    } catch (IOException e) {
        throw new ToolException(MessageFormat.format("Failed to read from {0}.", configurationFile), e,
                ToolException.CONFIGURATION_FILE_IO_ERROR);
    } finally {
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
                // ignore
            }
        }
    }
}

From source file:com.splunk.Command.java

public Command load(String path) {
    ArrayList<String> argList = new ArrayList<String>();

    try {
        FileReader fileReader = new FileReader(path);
        try {
            BufferedReader reader = new BufferedReader(fileReader);
            while (true) {
                String line;
                line = reader.readLine();
                if (line == null)
                    break;
                if (line.startsWith("#"))
                    continue;
                line = line.trim();
                if (line.length() == 0)
                    continue;
                if (!line.startsWith("-"))
                    line = "--" + line;
                argList.add(line);
            }
        } finally {
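            // closing the FileReader releases the file handle; the BufferedReader wrapper holds no extra system resources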
            fileReader.close();
        }
    } catch (IOException e) {
        error(e.getMessage());
        return this;
    }

    parse(argList.toArray(new String[argList.size()]));
    return this;
}

From source file:org.eclipse.kura.deployment.agent.impl.DeploymentAgent.java

private void installPackagesFromConfFile() {

    if (this.m_dpaConfPath != null) {
        FileReader fr = null;
        try {
            fr = new FileReader(this.m_dpaConfPath);
            this.m_deployedPackages.load(fr);
        } catch (IOException e) {
            s_logger.error("Exception loading deployment packages configuration file", e);
        } finally {
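            // classic pre-Java-7 idiom: null-check and close in finally so the reader is released on every path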
            if (fr != null) {
                try {
                    fr.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    Set<Object> packageNames = this.m_deployedPackages.keySet();
    for (Object packageName : packageNames) {
        String packageUrl = (String) this.m_deployedPackages.get(packageName);

        s_logger.info("Deploying package name {} at URL {}", packageName, packageUrl);
        try {
            installDeploymentPackageAsync(packageUrl);
        } catch (Exception e) {
            s_logger.error("Error installing package {}", packageName, e);
        }
    }
}

From source file:org.apache.archiva.proxy.AbstractProxyTestCase.java

/**
 * Read the first line from the checksum file and return it.
 */
protected String readChecksumFile(File checksumFile) throws Exception {
    FileReader freader = null;
    BufferedReader buf = null;

    try {
        freader = new FileReader(checksumFile);
        buf = new BufferedReader(freader);
        return buf.readLine();
    } finally {
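        // buf.close() also closes the wrapped freader, so the second close below is a defensive no-op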
        if (buf != null) {
            buf.close();
        }

        if (freader != null) {
            freader.close();
        }
    }
}

From source file:com.apifest.doclet.integration.tests.DocletTest.java

@Test
public void check_what_doclet_will_generate_when_tags_are_wrong() throws Exception {
    // GIVEN
    String parserFilePath = "./all-mappings-docs.json";
    // WHEN
    runDoclet();
    // THEN
    JSONParser parser = new JSONParser();
    FileReader fileReader = null;
    try {
        fileReader = new FileReader(parserFilePath);
        JSONObject json = (JSONObject) parser.parse(fileReader);
        JSONArray arr = (JSONArray) json.get("endpoints");
        JSONObject obj = (JSONObject) arr.get(0);
        Assert.assertEquals(obj.get("wrongtag"), null);
        Assert.assertEquals(obj.get("@wrongtag"), null);
    } finally {
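        // an IOException thrown by close() here would replace any assertion failure raised in the try block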
        if (fileReader != null) {
            fileReader.close();
        }
        deleteJsonFile(parserFilePath);
    }
}

From source file:com.tupilabs.human_name_parser.ParserTest.java

@Test
public void testAll() throws IOException {
    BufferedReader buffer = null;
    FileReader reader = null;
    Splitter splitter = Splitter.on('|');

    try {
        reader = new FileReader(testNames);
        buffer = new BufferedReader(reader);

        String line = null;
        while ((line = buffer.readLine()) != null) {
            if (StringUtils.isBlank(line)) {
                LOGGER.warning("Empty line in testNames.txt");
                continue;
            }

            List<String> toks = splitter.splitToList(line);
            if (toks.size() != 9) {
                LOGGER.warning(String.format("Invalid line in testNames.txt: %s", line));
                continue;
            }

            validateLine(toks);
        }
    } finally {
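        // buffer.close() would also close the wrapped reader, so closing both (in either order) is redundant but harmless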
        if (reader != null)
            reader.close();
        if (buffer != null)
            buffer.close();
    }
}

From source file:edu.vt.cs.cnd2xsd.Cnd2XsdConverter.java

/**
 * Registers custom NodeType definitions in the RSR determined by the session.
 * @param session the RSR session
 * @param cndFileName path to the CND file
 * @return the array of registered NodeTypes in the RSR
 * @throws RepositoryException
 * @throws IOException
 */
public static NodeType[] RegisterCustomNodeTypes(Session session, String cndFileName, String prefix)
        throws RepositoryException, IOException {
    FileReader reader = null;
    try {
        reader = new FileReader(cndFileName);

        NodeType[] newNodeTypes = CndImporter.registerNodeTypes(reader, session);
        // guard against a null or empty result before iterating over it
        if (newNodeTypes == null || newNodeTypes.length == 0) {
            NodeTypeManager man = session.getWorkspace().getNodeTypeManager();
            NodeTypeIterator nit = man.getAllNodeTypes();
            List<NodeType> nlist = new LinkedList<NodeType>();
            while (nit.hasNext()) {
                NodeType nt = nit.nextNodeType();
                if (nt.getName().contains(prefix)) {
                    log.debug("node type :" + nt.getName());
                    nlist.add(nt);
                }
            }
            return nlist.toArray(new NodeType[nlist.size()]);
        } else {
            for (NodeType nt : newNodeTypes) {
                log.debug("Registered: " + nt.getName());
            }
            return newNodeTypes;
        }
    } catch (InvalidNodeTypeDefinitionException ex) {
        ex.printStackTrace();
    } catch (NodeTypeExistsException ex) {
        ex.printStackTrace();
    } catch (UnsupportedRepositoryOperationException ex) {
        ex.printStackTrace();
    } catch (ParseException ex) {
        ex.printStackTrace();
    } finally {
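        // runs after both return statements above and after every catch block, guaranteeing the reader is closed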
        if (reader != null) {
            reader.close();
        }
    }
    return null;

}

From source file:at.tuwien.minimee.migration.parser.Jip_Parser.java

/**
 * Reads up to the line matching LAST_LINE_MATCH_BEFORE_PARSING, then
 * parses the total time from the following line.
 *
 * @param fileToRead
 * @return
 */
public double getTotalTime(String fileToRead) {
    Double total_time = 0.0;
    try {
        /*
         * Sets up a file reader to read the file passed on the command line
         * one character at a time
         */
        FileReader input = new FileReader(fileToRead);

        try {
            /*
             * Filter the FileReader through a BufferedReader to read a line
             * at a time
             */
            BufferedReader bufRead = new BufferedReader(input);

            String line; // String that holds current file line

            // Read first line
            line = bufRead.readLine();

            // Read through the file one line at a time until the marker line is found
            while (line != null) {
                if (line.contains(LAST_LINE_MATCH_BEFORE_PARSING))
                    break;
                line = bufRead.readLine();
            }

            // read next line containing the first info
            line = bufRead.readLine();

            total_time = interpretLine(line);
        } finally {
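            // closing the underlying FileReader is enough; bufRead holds no system resources of its own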
            input.close();
        }
    } catch (IOException e) {
        log.error(e);
    }

    return total_time;
}

From source file:org.deri.iris.queryrewriting.SQLRewritingTest.java

public void testSQLRewriting() throws Exception {

    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;

    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy " + ncCheckStrategy.name());

    // Read the test-cases file

    final File testSuiteFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases.txt");

    final List<String> tests = IOUtils.readLines(new FileReader(testSuiteFile));
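    // note: the FileReader above is never closed; wrapping it in try-with-resources would release the handle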

    final String creationDate = dateFormat.format(new Date());

    // Summary reporting
    final String summaryPrefix = StringUtils.join(creationDate, "-", decomposition.name(), "-", rewLang.name(),
            "-", subchkStrategy.name(), "-", ncCheckStrategy.name());

    final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
    final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');

    final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
    final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');

    final File cacheSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "cache-summary.csv"));
    final CSVWriter cacheSummaryWriter = new CSVWriter(new FileWriter(cacheSummaryFile), ',');

    final File memorySummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "memory-summary.csv"));
    final CSVWriter memorySummaryWriter = new CSVWriter(new FileWriter(memorySummaryFile), ',');

    sizeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingSizeReportHeader());
    timeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingTimeReportHeader());
    cacheSummaryWriter.writeNext(ReportingUtils.getSummaryCachingReportHeader());
    memorySummaryWriter.writeNext(ReportingUtils.getSummaryMemoryReportHeader());

    // Compute the rewriting for each test ontology.
    for (final String testName : tests) {

        // Read the next test case on the list
        final File testFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), testName + ".dtg");

        // Create the Directory where to store the test results
        final File outTestDir = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH), testName);
        if (!outTestDir.exists()) {
            if (outTestDir.mkdir()) {
                LOGGER.info("Created output directory: " + testName);
            } else {
                LOGGER.fatal("Error creating output directory");
            }
        }

        LOGGER.info("Processing file: " + testName);

        // Read the content of the current program
        final FileReader fr = new FileReader(testFile);
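        // if read() throws below, fr.close() is never reached; try-with-resources would guarantee cleanup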
        final StringBuilder sb = new StringBuilder();
        int ch = -1;
        while ((ch = fr.read()) >= 0) {
            sb.append((char) ch);
        }
        final String program = sb.toString();
        fr.close();

        // Parse the program
        final Parser parser = new Parser();
        parser.parse(program);

        // Get the rules
        final List<IRule> rules = parser.getRules();

        // Get the queries
        final List<IQuery> queryHeads = parser.getQueries();

        // Get the TGDs from the set of rules
        final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);

        // Convert the query bodies in rules
        final List<IRule> bodies = new LinkedList<IRule>(rules);
        bodies.removeAll(tgds);

        final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads);

        // Get the configuration
        final Map<IPredicate, IRelation> conf = parser.getDirectives();
        if (conf.containsKey(BasicFactory.getInstance().createPredicate("DBConnection", 8))) {
            StorageManager.getInstance();
            StorageManager.configure(conf);
        } else {
            LOGGER.error("Missing DB connection parameters.");
            throw new ConfigurationException("Missing DB connection parameters.");

        }

        // Get the SBox rules from the set of rules
        final List<IRule> sbox = RewritingUtils.getSBoxRules(rules, queryHeads);

        // get the constraints from the set of rules
        final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);

        final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
        LOGGER.info("Expressivity: " + exprs.toString());

        if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY))
            throw new EvaluationException("Only Linear and Sticky TGDs are supported for rewriting.");

        // compute the dependency graph

        LOGGER.debug("Computing position dependencies.");
        // long depGraphMem = MonitoringUtils.getHeapUsage();
        long posDepTime = System.currentTimeMillis();
        Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils
                .computePositionDependencyGraph(tgds);
        posDepTime = System.currentTimeMillis() - posDepTime;

        // Setup caching
        CacheManager.setupCaching();

        // if linear TGDs, compute the atom coverage graph.
        LOGGER.debug("Computing atom coverage graph.");
        long atomCoverGraphTime = System.currentTimeMillis();
        if (exprs.contains(Expressivity.LINEAR)) {
            deps = DepGraphUtils.computeAtomCoverageGraph(deps);
        }
        atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;
        // depGraphMem = MonitoringUtils.getHeapUsage() - depGraphMem;

        // rewriting constraints
        // long ncRewMem = MonitoringUtils.getHeapUsage();
        final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
                RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
        long ncRewTime = System.currentTimeMillis();
        final Set<IRule> rewrittenConstraints = Sets.newHashSet();
        if (!ncCheckStrategy.equals(NCCheck.NONE)) {
            for (final IRule c : constraints) {
                rewrittenConstraints
                        .addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
            }
        }
        ncRewTime = System.currentTimeMillis() - ncRewTime;
        // ncRewMem = ncRewMem - MonitoringUtils.getHeapUsage();
        LOGGER.debug("Finished rewriting constraints.");

        // dump the rewritten constraints:
        File outFile = FileUtils.getFile(outTestDir, testName.concat("_cns.dtg"));
        final FileWriter cnsFW = new FileWriter(outFile);
        IOUtils.writeLines(rewrittenConstraints, IOUtils.LINE_SEPARATOR, cnsFW);
        cnsFW.close();

        // Compute the Rewriting
        final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy,
                ncCheckStrategy);
        for (final IRule q : queries) {

            // Setup caching
            CacheManager.setupCaching();

            final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                    .getPredicateSymbol();

            // Setup reporting
            final Reporter rep = Reporter.getInstance(true);
            Reporter.setupReporting();
            Reporter.setQuery(queryPredicate);
            Reporter.setOntology(testName);
            rep.setValue(RewMetric.DEPGRAPH_TIME, posDepTime);

            LOGGER.info("Processing query: ".concat(q.toString()));
            // final long rewMem = MonitoringUtils.getHeapUsage();
            final long overallTime = System.currentTimeMillis();
            final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps, exprs);
            rep.setValue(RewMetric.OVERALL_TIME, System.currentTimeMillis() - overallTime);

            // rep.setValue(RewMetric.REW_MEM, MonitoringUtils.getHeapUsage() - rewMem);
            // rep.setValue(RewMetric.DEPGRAPH_MEM, depGraphMem);
            rep.setValue(RewMetric.REW_SIZE, (long) rewriting.size());
            rep.setValue(RewMetric.JOIN_COUNT, RewritingUtils.joinCount(rewriting));
            rep.setValue(RewMetric.ATOM_COUNT, RewritingUtils.atomsCount(rewriting));
            rep.setValue(RewMetric.REW_CNS_COUNT, (long) rewrittenConstraints.size());
            rep.setValue(RewMetric.REW_CNS_TIME, ncRewTime);
            // rep.setValue(RewMetric.REW_CNS_MEM, ncRewMem);

            // Other metrics
            rep.setValue(RewMetric.OVERHEAD_TIME,
                    rep.getValue(RewMetric.OVERALL_TIME) - rep.getValue(RewMetric.REW_TIME));

            // Caching size metrics
            rep.setValue(RewMetric.MAX_COVERING_CACHE_SIZE, CoveringCache.getCache().size(CacheType.COVERING));
            rep.setValue(RewMetric.MAX_NON_COVERING_CACHE_SIZE,
                    CoveringCache.getCache().size(CacheType.NOT_COVERING));
            rep.setValue(RewMetric.MAX_MAPSTO_CACHE_SIZE, MapsToCache.size(MapsToCache.CacheType.MAPSTO));
            rep.setValue(RewMetric.MAX_NOT_MAPSTO_CACHE_SIZE,
                    MapsToCache.size(MapsToCache.CacheType.NOT_MAPSTO));
            rep.setValue(RewMetric.MAX_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_NON_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_RENAMING_CACHE_SIZE, RenamingCache.size());
            rep.setValue(RewMetric.MAX_MGU_CACHE_SIZE, MGUCache.size());

            // Create a file to store the rewriting results.

            outFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_rew.dtg"));
            final FileWriter rewFW = new FileWriter(outFile);

            rewFW.write("/// Query: " + q + "///\n");
            rewFW.write("/// Ontology: " + testName + "///");
            rewFW.write("/// Created on: " + creationDate + " ///\n");
            rewFW.write("/// Rules in the program: " + rules.size() + " ///\n");
            rewFW.write("/// TGDs in the program: " + tgds.size() + " ///\n");
            rewFW.write("/// Constraints in the program: " + constraints.size() + " ///\n");
            rewFW.write("/// Theory expressivity: " + exprs.toString() + " ///\n");
            rewFW.write("/// Decomposition: " + decomposition.name() + " ///\n");
            rewFW.write("/// Subsumption Check Strategy: " + subchkStrategy.name() + " ///\n");
            rewFW.write("/// Negative Constraints Check Strategy: " + ncCheckStrategy.name() + " ///\n");
            rewFW.write(IOUtils.LINE_SEPARATOR);

            LOGGER.info("Writing the output at: " + outFile.getAbsolutePath());

            // dump metrics for individual queries.
            rewFW.write(rep.getReport());

            rewFW.write(IOUtils.LINE_SEPARATOR);
            rewFW.write(IOUtils.LINE_SEPARATOR);

            rewFW.write("/// Rewritten Program ///\n");
            final Set<ILiteral> newHeads = new HashSet<ILiteral>();
            for (final IRule qr : rewriting) {
                newHeads.add(qr.getHead().iterator().next());
                rewFW.write(qr + "\n");
            }
            rewFW.write("\n");
            for (final ILiteral h : newHeads) {
                rewFW.write("?- " + h + ".\n");
            }
            rewFW.write("\n");
            rewFW.flush();
            rewFW.close();

            // dump summary metrics.
            sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
            timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
            cacheSummaryWriter.writeNext(rep.getSummaryCacheMetrics());
            memorySummaryWriter.writeNext(rep.getSummaryMemoryMetrics());
            sizeSummaryWriter.flush();
            timeSummaryWriter.flush();
            cacheSummaryWriter.flush();
            memorySummaryWriter.flush();

            if (sbox.size() > 0) {

                // Produce the rewriting according to the Storage Box
                final IQueryRewriter ndmRewriter = new NDMRewriter(sbox);
                // final Set<ILiteral> newHeads = new HashSet<ILiteral>();
                final Set<IRule> sboxRew = new LinkedHashSet<IRule>();
                for (final IRule r : rewriting) {
                    // Create a file to store the rewriting results as Datalog Rules
                    LOGGER.debug("-- Processing rewriting: " + r);
                    sboxRew.addAll(ndmRewriter.getRewriting(r));
                }

                // dump the rewritten sbox rewriting:
                final File sboxFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_sbox_rew.dtg"));
                final FileWriter sboxFW = new FileWriter(sboxFile);
                IOUtils.writeLines(sboxRew, IOUtils.LINE_SEPARATOR, sboxFW);
                sboxFW.close();

                // Produce a SQL rewriting
                final SQLRewriter sqlRewriter = new SQLRewriter(sboxRew);
                final String sqlRew = sqlRewriter.getUCQSQLRewriting("", 1000, 0);
                final File sqlFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_rew.sql"));
                final FileWriter sqlFW = new FileWriter(sqlFile);
                IOUtils.write(sqlRew, sqlFW);
                sqlFW.close();

                // Execute the SQL rewriting
                LOGGER.info("Executing SQL Rewriting");

                long duration = System.nanoTime();
                final IRelation result = StorageManager.executeQuery(sqlRew);
                duration = (System.nanoTime() - duration) / 1000000;
                LOGGER.info(result.size() + " tuples in " + duration + " [ms]\n");
            }
        }
    }
    sizeSummaryWriter.close();
    timeSummaryWriter.close();
    cacheSummaryWriter.close();
    memorySummaryWriter.close();

}