Example usage for org.apache.commons.io IOUtils writeLines

List of usage examples for org.apache.commons.io IOUtils writeLines

Introduction

On this page you can find example usages of org.apache.commons.io IOUtils writeLines.

Prototype

public static void writeLines(Collection lines, String lineEnding, Writer writer) throws IOException 

Source Link

Document

Writes the toString() value of each item in a collection to a Writer line by line, using the specified line ending.

Usage

From source file:com.streamsets.pipeline.stage.origin.spooldir.TestSpoolDirSource.java

/**
 * Creates ten small spool files (5 lines each) in a unique directory and
 * verifies they can be read with 2, 5, and 10 reader threads.
 */
@Test
public void testWithMultipleThreads() throws Exception {
    // set up multiple test files in a unique directory under target/
    File f = new File("target", UUID.randomUUID().toString());
    Assert.assertTrue(f.mkdirs());

    final int numFiles = 10;
    for (int i = 0; i < numFiles; i++) {
        // NOTE(review): the "-file-" prefix (leading dash) differs from the
        // "file-" naming used by the sibling tests — confirm it matches the
        // pattern used inside readFilesMultipleThreads().
        // try-with-resources closes the stream even if writeLines throws.
        try (FileOutputStream outputStream = new FileOutputStream(
                new File(f.getAbsolutePath(), "-file-" + i + ".log"))) {
            // each file has 5 lines
            IOUtils.writeLines(ImmutableList.of("1", "2", "3", "4", "5"), "\n", outputStream);
        }
    }

    readFilesMultipleThreads(f.getAbsolutePath(), 2);
    readFilesMultipleThreads(f.getAbsolutePath(), 5);
    readFilesMultipleThreads(f.getAbsolutePath(), 10);

}

From source file:com.streamsets.pipeline.stage.origin.spooldir.TestSpoolDirSource.java

/**
 * Verifies that the multithreaded spool dir source resumes correctly from a
 * previously saved offset map: fully processed files are skipped, a
 * partially read file is resumed mid-file, and the remaining files are
 * consumed to completion across 10 worker threads.
 */
@Test
public void testWithMultipleThreadsInitialOffsets() throws Exception {
    // set up multiple test files in a unique directory under target/
    File f = new File("target", UUID.randomUUID().toString());
    Assert.assertTrue(f.mkdirs());

    final int numFiles = 10;
    for (int i = 0; i < numFiles; i++) {
        FileOutputStream outputStream = new FileOutputStream(
                new File(f.getAbsolutePath(), "file-" + i + ".log"));
        // each file has 5 lines
        IOUtils.writeLines(ImmutableList.of("1", "2", "3", "4", "5"), "\n", outputStream);
        outputStream.close();
    }

    // Simulate prior progress: file-0.log and file-3.log were fully processed
    // (POS = -1), file-2.log was processed 1 line/record (POS = 2), and
    // file-1.log will be skipped because it sorts before file-3.log.
    Map<String, String> lastSourceOffsetMap = ImmutableMap.of(SpoolDirSource.OFFSET_VERSION, OFFSET_VERSION_ONE,
            "file-0.log", "{\"POS\":\"-1\"}", "file-2.log", "{\"POS\":\"2\"}", "file-3.log",
            "{\"POS\":\"-1\"}");

    // Text format, glob matching on file-[0-9].log, 10 reader threads.
    SpoolDirConfigBean conf = new SpoolDirConfigBean();
    conf.dataFormat = DataFormat.TEXT;
    conf.spoolDir = f.getAbsolutePath();
    conf.batchSize = 10;
    conf.overrunLimit = 100;
    conf.poolingTimeoutSecs = 1;
    conf.filePattern = "file-[0-9].log";
    conf.pathMatcherMode = PathMatcherMode.GLOB;
    conf.maxSpoolFiles = 10;
    conf.initialFileToProcess = null;
    conf.dataFormatConfig.compression = Compression.NONE;
    conf.dataFormatConfig.filePatternInArchive = "*";
    conf.errorArchiveDir = null;
    conf.postProcessing = PostProcessingOptions.NONE;
    conf.retentionTimeMins = 10;
    conf.dataFormatConfig.textMaxLineLen = 10;
    conf.dataFormatConfig.onParseError = OnParseError.ERROR;
    conf.dataFormatConfig.maxStackTraceLines = 0;
    conf.allowLateDirectory = false;
    conf.numberOfThreads = 10;

    SpoolDirSource source = new SpoolDirSource(conf);
    PushSourceRunner runner = new PushSourceRunner.Builder(SpoolDirDSource.class, source).addOutputLane("lane")
            .build();

    AtomicInteger batchCount = new AtomicInteger(0);
    final List<Record> records = Collections.synchronizedList(new ArrayList<>(10));
    runner.runInit();

    final int maxBatchSize = 10;

    try {
        runner.runProduce(lastSourceOffsetMap, maxBatchSize, output -> {
            batchCount.incrementAndGet();

            // The callback may run on multiple producer threads; guard the list.
            synchronized (records) {
                records.addAll(output.getRecords().get("lane"));
            }

            // Stop once the expected 34 remaining records arrive (presumably
            // 6 unread files x 5 lines + 4 remaining lines of file-2.log),
            // or bail out after too many batches so a regression cannot hang
            // the test.
            if (records.size() == 34 || batchCount.get() > 10) {
                runner.setStop();
            }
        });

        runner.waitOnProduce();
        Assert.assertTrue(batchCount.get() > 1);
        TestOffsetUtil.compare("file-9.log::-1", runner.getOffsets());
        Assert.assertEquals(34, records.size());
    } finally {
        runner.runDestroy();
    }

}

From source file:com.streamsets.pipeline.stage.origin.spooldir.TestSpoolDirSource.java

/**
 * Feeds the spool dir source a file containing malformed JSON and verifies
 * that no records are produced and that the bad file ends up in the error
 * archive directory — except in preview mode, where the spool directory is
 * left untouched.
 *
 * @param preview whether the runner executes in preview mode
 */
public void errorFile(boolean preview) throws Exception {
    File spoolDir = new File("target", UUID.randomUUID().toString());
    spoolDir.mkdir();
    File errorDir = new File("target", UUID.randomUUID().toString());
    errorDir.mkdir();

    // JSON source reading file-0.log from the spool dir; parse failures are
    // errors (OnParseError.ERROR) and bad files go to errorArchiveDir.
    SpoolDirConfigBean conf = new SpoolDirConfigBean();
    conf.dataFormat = DataFormat.JSON;
    conf.spoolDir = spoolDir.getAbsolutePath();
    conf.batchSize = 10;
    conf.overrunLimit = 100;
    conf.poolingTimeoutSecs = 1;
    conf.filePattern = "file-[0-9].log";
    conf.pathMatcherMode = PathMatcherMode.GLOB;
    conf.maxSpoolFiles = 10;
    conf.initialFileToProcess = "file-0.log";
    conf.dataFormatConfig.compression = Compression.NONE;
    conf.dataFormatConfig.filePatternInArchive = "*";
    conf.dataFormatConfig.csvHeader = CsvHeader.WITH_HEADER;
    conf.errorArchiveDir = errorDir.getAbsolutePath();
    conf.postProcessing = PostProcessingOptions.NONE;
    conf.retentionTimeMins = 10;
    conf.allowLateDirectory = false;
    conf.dataFormatConfig.jsonContent = JsonMode.MULTIPLE_OBJECTS;
    conf.dataFormatConfig.onParseError = OnParseError.ERROR;

    // Write a single line of invalid JSON; try-with-resources closes the
    // stream even if writeLines throws (the original leaked it on failure).
    try (FileOutputStream outputStream = new FileOutputStream(new File(conf.spoolDir, "file-0.log"))) {
        IOUtils.writeLines(ImmutableList.of("{a"), "\n", outputStream);
    }

    SpoolDirSource source = new SpoolDirSource(conf);
    PushSourceRunner runner = new PushSourceRunner.Builder(SpoolDirDSource.class, source).setPreview(preview)
            .setOnRecordError(OnRecordError.TO_ERROR).addOutputLane("lane").build();
    final List<Record> records = Collections.synchronizedList(new ArrayList<>(10));

    runner.runInit();
    try {
        runner.runProduce(new HashMap<>(), 10, output -> {
            synchronized (records) {
                records.addAll(output.getRecords().get("lane"));
            }
            runner.setStop();
        });

        runner.waitOnProduce();

        // The malformed file must not yield any records.
        Assert.assertNotNull(records);
        Assert.assertEquals(0, records.size());

        // Depending on the preview flag, we should see the file in one directory or the other
        if (preview) {
            Assert.assertEquals(0, errorDir.list().length);
            Assert.assertEquals(1, spoolDir.list().length);
        } else {
            Assert.assertEquals(1, errorDir.list().length);
            Assert.assertEquals(0, spoolDir.list().length);
        }

    } finally {
        runner.runDestroy();
    }
}

From source file:org.apache.sling.distribution.queue.impl.simple.SimpleDistributionQueueCheckpoint.java

/**
 * Persists the current queue contents to a checkpoint file: each entry is
 * written as "&lt;packageId&gt; &lt;json&gt;" on its own line. Data is first written to
 * "&lt;queue&gt;-checkpoint-new" and then renamed over "&lt;queue&gt;-checkpoint" so
 * readers never observe a half-written checkpoint.
 */
@Override
public void run() {
    String fileName = queue.getName() + "-checkpoint";
    File checkpointFile = new File(checkpointDirectory, fileName + "-new");
    log.debug("started checkpointing");

    try {
        // The original used `assert` for delete/create, which is a no-op
        // unless the JVM runs with -ea; check the results explicitly.
        if (checkpointFile.exists() && !checkpointFile.delete()) {
            log.warn("could not delete stale checkpoint file {}", checkpointFile);
        }
        if (!checkpointFile.createNewFile()) {
            log.warn("checkpoint file {} already existed", checkpointFile);
        }
        Collection<String> lines = new LinkedList<String>();
        for (DistributionQueueEntry queueEntry : queue.getItems(0, -1)) {
            DistributionQueueItem item = queueEntry.getItem();
            String packageId = item.getPackageId();
            // Serialize the item's properties as a single-line JSON object;
            // String[] values become JSON arrays.
            StringWriter w = new StringWriter();
            JSONWriter jsonWriter = new JSONWriter(w);
            jsonWriter.object();
            for (Map.Entry<String, Object> entry : item.entrySet()) {
                jsonWriter.key(entry.getKey());
                Object value = entry.getValue();
                boolean isArray = value instanceof String[];
                if (isArray) {
                    jsonWriter.array();
                    for (String s : ((String[]) value)) {
                        jsonWriter.value(s);
                    }
                    jsonWriter.endArray();
                } else {
                    jsonWriter.value(value);
                }
            }
            jsonWriter.endObject();
            lines.add(packageId + " " + w.toString());
        }
        log.debug("parsed {} items", lines.size());
        // try-with-resources closes the stream even if writeLines fails
        // (the original leaked it on any exception). Also note the original
        // passed the charset name as the *lineEnding* argument of the 3-arg
        // OutputStream overload, which would write "UTF-8" (or similar)
        // between entries instead of newlines; use the 4-arg overload with
        // a null line ending (platform default) and an explicit encoding.
        try (FileOutputStream fileOutputStream = new FileOutputStream(checkpointFile)) {
            IOUtils.writeLines(lines, null, fileOutputStream, Charset.defaultCharset().name());
            fileOutputStream.flush();
        }
        boolean success = checkpointFile.renameTo(new File(checkpointDirectory, fileName));
        log.debug("checkpoint succeeded: {}", success);
    } catch (Exception e) {
        // Pass the exception so the stack trace is preserved in the log.
        log.error("failed checkpointing for queue {}", queue.getName(), e);
    }
}

From source file:org.artifactory.webapp.servlet.TraceLoggingResponse.java

/**
 * Writes the request trace header fields followed by the aggregated log
 * messages to the response body as plain text, then marks the response as
 * successful. The response writer is always closed.
 *
 * @param requestId   Request trace context ID
 * @param methodName  HTTP method name
 * @param username    Authenticated user name
 * @param requestPath Request repo path id
 * @throws IOException if writing to the response fails
 */
public void sendResponse(String requestId, String methodName, String username, String requestPath)
        throws IOException {
    Writer writer = null;
    try {
        artifactoryResponse.setContentType(MediaType.TEXT_PLAIN.toString());
        writer = artifactoryResponse.getWriter();
        writer.append("Request ID: ").append(requestId).append("\n");
        writer.append("Repo Path ID: ").append(requestPath).append("\n");
        writer.append("Method Name: ").append(methodName).append("\n");
        writer.append("User: ").append(username).append("\n");
        writer.append("Time: ").append(time).append("\n");
        writer.append("Thread: ").append(threadName).append("\n");
        writer.flush();
        writer.append("Steps: ").append("\n");
        // null lineEnding -> IOUtils uses the platform default line separator
        IOUtils.writeLines(logAggregator, null, writer);
        writer.flush();
        artifactoryResponse.sendSuccess();
    } finally {
        // always release the response writer, even if writing failed
        if (writer != null) {
            writer.close();
        }
    }
}

From source file:org.canova.api.records.reader.impl.LineReaderTest.java

/**
 * Writes nine one-character lines into a gzipped file, then verifies that a
 * LineRecordReader over an InputStreamInputSplit yields exactly nine
 * single-field records.
 */
@Test
public void testLineReaderWithInputStreamInputSplit() throws Exception {
    File tmpdir = new File("tmpdir");
    tmpdir.mkdir();

    File tmp1 = new File("tmpdir/tmp1.txt.gz");

    // try-with-resources closes (and thereby finishes) the gzip stream even
    // if writeLines throws; GZIPOutputStream.close() flushes the trailer.
    try (OutputStream os = new GZIPOutputStream(new FileOutputStream(tmp1, false))) {
        IOUtils.writeLines(Arrays.asList("1", "2", "3", "4", "5", "6", "7", "8", "9"), null, os);
    }

    // Keep a handle on the input stream so it can be closed afterwards; the
    // original leaked it, which can make deleteDirectory fail on Windows.
    GZIPInputStream in = new GZIPInputStream(new FileInputStream(tmp1));
    try {
        InputSplit split = new InputStreamInputSplit(in);

        RecordReader reader = new LineRecordReader();
        reader.initialize(split);

        int count = 0;
        while (reader.hasNext()) {
            assertEquals(1, reader.next().size());
            count++;
        }

        assertEquals(9, count);
    } finally {
        in.close();
    }

    FileUtils.deleteDirectory(tmpdir);
}

From source file:org.codehaus.mojo.macker.CommandLineFileTest.java

/**
 * Writes the given argument lines to {@code TEST_FILE}, UTF-8 encoded, one
 * per line with LF line endings.
 *
 * @param lines the argument lines to write (elements are Strings)
 * @throws IOException if the file cannot be written
 */
private void writeArgsFile(List/*<String>*/ lines) throws IOException {
    // try-with-resources closes the writer even if writeLines throws (the
    // original leaked it); Writer.close() flushes, so no explicit flush needed.
    try (Writer out = new OutputStreamWriter(new FileOutputStream(TEST_FILE), "UTF-8")) {
        IOUtils.writeLines(lines, "\n", out);
    }
}

From source file:org.datavec.api.records.reader.impl.LineReaderTest.java

/**
 * Writes nine one-character lines into a gzipped file under the system temp
 * directory, then verifies that a LineRecordReader over an
 * InputStreamInputSplit yields exactly nine single-field records.
 */
@Test
public void testLineReaderWithInputStreamInputSplit() throws Exception {
    String tempDir = System.getProperty("java.io.tmpdir");
    File tmpdir = new File(tempDir, "tmpdir");
    tmpdir.mkdir();

    File tmp1 = new File(tmpdir, "tmp1.txt.gz");

    // try-with-resources closes (and thereby finishes) the gzip stream even
    // if writeLines throws; GZIPOutputStream.close() flushes the trailer.
    try (OutputStream os = new GZIPOutputStream(new FileOutputStream(tmp1, false))) {
        IOUtils.writeLines(Arrays.asList("1", "2", "3", "4", "5", "6", "7", "8", "9"), null, os);
    }

    // Keep a handle on the input stream so it can be closed afterwards; the
    // original leaked it, which can make deleteDirectory fail on Windows.
    GZIPInputStream in = new GZIPInputStream(new FileInputStream(tmp1));
    try {
        InputSplit split = new InputStreamInputSplit(in);

        RecordReader reader = new LineRecordReader();
        reader.initialize(split);

        int count = 0;
        while (reader.hasNext()) {
            assertEquals(1, reader.next().size());
            count++;
        }

        assertEquals(9, count);
    } finally {
        in.close();
    }

    try {
        FileUtils.deleteDirectory(tmpdir);
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:org.deri.iris.queryrewriting.QueryRewritingTest.java

/**
 * Runs the first-order rewriting test suite: for every ontology listed in
 * test-cases.txt, parses the program, rewrites the negative constraints and
 * each query under the configured strategies, dumps the rewriting plus
 * per-query metrics to the output directory, and appends rows to the
 * size/time/cache/memory CSV summary files.
 */
public void testFORewriting() throws Exception {

    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;

    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy " + ncCheckStrategy.name());

    // Read the test-cases file (one ontology/test name per line).

    final File testSuiteFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases.txt");

    final List<String> tests = IOUtils.readLines(new FileReader(testSuiteFile));

    final String creationDate = dateFormat.format(new Date());

    // Summary reporting: file names are prefixed with the date and the
    // strategy configuration so runs do not overwrite each other.
    final String summaryPrefix = StringUtils.join(creationDate, "-", decomposition.name(), "-", rewLang.name(),
            "-", subchkStrategy.name(), "-", ncCheckStrategy.name());

    final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
    final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');

    final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
    final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');

    final File cacheSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "cache-summary.csv"));
    final CSVWriter cacheSummaryWriter = new CSVWriter(new FileWriter(cacheSummaryFile), ',');

    final File memorySummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "memory-summary.csv"));
    final CSVWriter memorySummaryWriter = new CSVWriter(new FileWriter(memorySummaryFile), ',');

    // NOTE(review): the four summary writers are closed only on the success
    // path at the end of the method; an exception mid-run leaks them —
    // consider try/finally or try-with-resources.

    sizeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingSizeReportHeader());
    timeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingTimeReportHeader());
    cacheSummaryWriter.writeNext(ReportingUtils.getSummaryCachingReportHeader());
    memorySummaryWriter.writeNext(ReportingUtils.getSummaryMemoryReportHeader());

    // Compute the rewriting for each test ontology.
    for (final String testName : tests) {

        // Read the next test case on the list
        final File testFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), testName + ".dtg");

        // Create the Directory where to store the test results
        final File outTestDir = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH), testName);
        if (!outTestDir.exists()) {
            if (outTestDir.mkdir()) {
                LOGGER.info("Created output directory: " + testName);
            } else {
                LOGGER.fatal("Error creating output directory");
            }
        }

        LOGGER.info("Processing file: " + testName);

        // Read the content of the current program (character by character).
        final FileReader fr = new FileReader(testFile);
        final StringBuilder sb = new StringBuilder();
        int ch = -1;
        while ((ch = fr.read()) >= 0) {
            sb.append((char) ch);
        }
        final String program = sb.toString();
        fr.close();

        // Parse the program
        final Parser parser = new Parser();
        parser.parse(program);

        // Get the rules
        final List<IRule> rules = parser.getRules();

        // Get the queries
        final List<IQuery> queryHeads = parser.getQueries();

        // Get the TGDs from the set of rules
        final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);

        // Convert the query bodies in rules
        final List<IRule> bodies = new LinkedList<IRule>(rules);
        bodies.removeAll(tgds);

        final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads);

        // get the constraints from the set of rules
        final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);

        final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
        LOGGER.info("Expressivity: " + exprs.toString());

        if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY))
            throw new EvaluationException("Only Linear and Sticky TGDs are supported for rewriting.");

        // compute the dependency graph

        LOGGER.debug("Computing position dependencies.");
        // long depGraphMem = MonitoringUtils.getHeapUsage();
        long posDepTime = System.currentTimeMillis();
        Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils
                .computePositionDependencyGraph(tgds);
        posDepTime = System.currentTimeMillis() - posDepTime;

        // Setup caching
        CacheManager.setupCaching();

        // if linear TGDs, compute the atom coverage graph.
        LOGGER.debug("Computing atom coverage graph.");
        long atomCoverGraphTime = System.currentTimeMillis();
        if (exprs.contains(Expressivity.LINEAR)) {
            deps = DepGraphUtils.computeAtomCoverageGraph(deps);
        }
        atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;
        // depGraphMem = MonitoringUtils.getHeapUsage() - depGraphMem;

        // rewriting constraints
        // long ncRewMem = MonitoringUtils.getHeapUsage();
        final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
                RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
        long ncRewTime = System.currentTimeMillis();
        final Set<IRule> rewrittenConstraints = Sets.newHashSet();
        if (!ncCheckStrategy.equals(NCCheck.NONE)) {
            for (final IRule c : constraints) {
                rewrittenConstraints
                        .addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
            }
        }
        ncRewTime = System.currentTimeMillis() - ncRewTime;
        // ncRewMem = ncRewMem - MonitoringUtils.getHeapUsage();
        LOGGER.debug("Finished rewriting constraints.");

        // dump the rewritten constraints:
        File outFile = FileUtils.getFile(outTestDir, testName.concat("_cns.dtg"));
        final FileWriter cnsFW = new FileWriter(outFile);
        IOUtils.writeLines(rewrittenConstraints, IOUtils.LINE_SEPARATOR, cnsFW);
        cnsFW.close();

        // Compute the Rewriting
        final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy,
                ncCheckStrategy);
        for (final IRule q : queries) {

            // Setup caching
            CacheManager.setupCaching();

            final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                    .getPredicateSymbol();

            // Setup reporting
            final Reporter rep = Reporter.getInstance(true);
            Reporter.setupReporting();
            Reporter.setQuery(queryPredicate);
            Reporter.setOntology(testName);
            rep.setValue(RewMetric.DEPGRAPH_TIME, posDepTime);

            LOGGER.info("Processing query: ".concat(q.toString()));
            // final long rewMem = MonitoringUtils.getHeapUsage();
            final long overallTime = System.currentTimeMillis();
            final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps, exprs);
            rep.setValue(RewMetric.OVERALL_TIME, System.currentTimeMillis() - overallTime);
            // rep.setValue(RewMetric.REW_MEM, MonitoringUtils.getHeapUsage() - rewMem);
            // rep.setValue(RewMetric.DEPGRAPH_MEM, depGraphMem);
            rep.setValue(RewMetric.REW_SIZE, (long) rewriting.size());
            rep.setValue(RewMetric.JOIN_COUNT, RewritingUtils.joinCount(rewriting));
            rep.setValue(RewMetric.ATOM_COUNT, RewritingUtils.atomsCount(rewriting));
            rep.setValue(RewMetric.REW_CNS_COUNT, (long) rewrittenConstraints.size());
            rep.setValue(RewMetric.REW_CNS_TIME, ncRewTime);
            // rep.setValue(RewMetric.REW_CNS_MEM, ncRewMem);

            // Other metrics
            rep.setValue(RewMetric.OVERHEAD_TIME,
                    rep.getValue(RewMetric.OVERALL_TIME) - rep.getValue(RewMetric.REW_TIME));

            // Caching size metrics
            rep.setValue(RewMetric.MAX_COVERING_CACHE_SIZE, CoveringCache.getCache().size(CacheType.COVERING));
            rep.setValue(RewMetric.MAX_NON_COVERING_CACHE_SIZE,
                    CoveringCache.getCache().size(CacheType.NOT_COVERING));
            rep.setValue(RewMetric.MAX_MAPSTO_CACHE_SIZE, MapsToCache.size(MapsToCache.CacheType.MAPSTO));
            rep.setValue(RewMetric.MAX_NOT_MAPSTO_CACHE_SIZE,
                    MapsToCache.size(MapsToCache.CacheType.NOT_MAPSTO));
            rep.setValue(RewMetric.MAX_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_NON_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_RENAMING_CACHE_SIZE, RenamingCache.size());
            rep.setValue(RewMetric.MAX_MGU_CACHE_SIZE, MGUCache.size());

            // Create a file to store the rewriting results.

            outFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_rew.dtg"));
            final FileWriter rewFW = new FileWriter(outFile);

            rewFW.write("/// Query: " + q + "///\n");
            // NOTE(review): this header line has no trailing "\n", unlike the
            // others — confirm whether that is intentional.
            rewFW.write("/// Ontology: " + testName + "///");
            rewFW.write("/// Created on: " + creationDate + " ///\n");
            rewFW.write("/// Rules in the program: " + rules.size() + " ///\n");
            rewFW.write("/// TGDs in the program: " + tgds.size() + " ///\n");
            rewFW.write("/// Constraints in the program: " + constraints.size() + " ///\n");
            rewFW.write("/// Theory expressivity: " + exprs.toString() + " ///\n");
            rewFW.write("/// Decomposition: " + decomposition.name() + " ///\n");
            rewFW.write("/// Subsumption Check Strategy: " + subchkStrategy.name() + " ///\n");
            rewFW.write("/// Negative Constraints Check Strategy: " + ncCheckStrategy.name() + " ///\n");
            rewFW.write(IOUtils.LINE_SEPARATOR);

            LOGGER.info("Writing the output at: " + outFile.getAbsolutePath());

            // dump metrics for individual queries.
            rewFW.write(rep.getReport());

            rewFW.write(IOUtils.LINE_SEPARATOR);
            rewFW.write(IOUtils.LINE_SEPARATOR);

            rewFW.write("/// Rewritten Program ///\n");
            final Set<ILiteral> newHeads = new HashSet<ILiteral>();
            for (final IRule qr : rewriting) {
                newHeads.add(qr.getHead().iterator().next());
                rewFW.write(qr + "\n");
            }
            rewFW.write("\n");
            for (final ILiteral h : newHeads) {
                rewFW.write("?- " + h + ".\n");
            }
            rewFW.write("\n");
            rewFW.flush();
            rewFW.close();

            // dump summary metrics.
            sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
            timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
            cacheSummaryWriter.writeNext(rep.getSummaryCacheMetrics());
            memorySummaryWriter.writeNext(rep.getSummaryMemoryMetrics());
            sizeSummaryWriter.flush();
            timeSummaryWriter.flush();
            cacheSummaryWriter.flush();
            memorySummaryWriter.flush();
        }
    }
    sizeSummaryWriter.close();
    timeSummaryWriter.close();
    cacheSummaryWriter.close();
    memorySummaryWriter.close();
}

From source file:org.deri.iris.queryrewriting.SQLRewritingTest.java

/**
 * End-to-end SQL-rewriting test: for each ontology listed in {@code test-cases.txt},
 * parses the program, rewrites its queries into a UCQ, translates the UCQ to SQL via
 * the storage-box (SBox) mapping, executes the SQL against the configured database,
 * and dumps per-query reports plus CSV summaries (size / time / cache / memory).
 *
 * @throws Exception if parsing, rewriting, file I/O, or query execution fails
 *                   (includes {@code ConfigurationException} when DB parameters are missing
 *                   and {@code EvaluationException} for unsupported TGD fragments).
 */
public void testSQLRewriting() throws Exception {

    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;

    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy " + ncCheckStrategy.name());

    // Read the test-cases file (one ontology name per line).
    final File testSuiteFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases.txt");

    // Close the reader explicitly: the original leaked the FileReader handed to readLines.
    final List<String> tests;
    try (final FileReader testsReader = new FileReader(testSuiteFile)) {
        tests = IOUtils.readLines(testsReader);
    }

    final String creationDate = dateFormat.format(new Date());

    // Summary reporting: one CSV per metric family, named after the run configuration.
    final String summaryPrefix = StringUtils.join(creationDate, "-", decomposition.name(), "-", rewLang.name(),
            "-", subchkStrategy.name(), "-", ncCheckStrategy.name());

    final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
    final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');

    final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
    final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');

    final File cacheSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "cache-summary.csv"));
    final CSVWriter cacheSummaryWriter = new CSVWriter(new FileWriter(cacheSummaryFile), ',');

    final File memorySummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "memory-summary.csv"));
    final CSVWriter memorySummaryWriter = new CSVWriter(new FileWriter(memorySummaryFile), ',');

    // NOTE(review): the four summary writers are only closed on the normal exit path;
    // an exception mid-run leaks them. Consider try-with-resources in a follow-up.
    sizeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingSizeReportHeader());
    timeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingTimeReportHeader());
    cacheSummaryWriter.writeNext(ReportingUtils.getSummaryCachingReportHeader());
    memorySummaryWriter.writeNext(ReportingUtils.getSummaryMemoryReportHeader());

    // Compute the rewriting for each test ontology.
    for (final String testName : tests) {

        // Read the next test case on the list
        final File testFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), testName + ".dtg");

        // Create the directory where to store the test results.
        // mkdirs() (not mkdir()) so missing parent directories do not fail the run.
        final File outTestDir = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH), testName);
        if (!outTestDir.exists()) {
            if (outTestDir.mkdirs()) {
                LOGGER.info("Created output directory: " + testName);
            } else {
                LOGGER.fatal("Error creating output directory");
            }
        }

        LOGGER.info("Processing file: " + testName);

        // Read the content of the current program. try-with-resources guarantees the
        // reader is closed even if reading fails (the original closed it manually).
        final String program;
        try (final FileReader fr = new FileReader(testFile)) {
            program = IOUtils.toString(fr);
        }

        // Parse the program
        final Parser parser = new Parser();
        parser.parse(program);

        // Get the rules
        final List<IRule> rules = parser.getRules();

        // Get the queries
        final List<IQuery> queryHeads = parser.getQueries();

        // Get the TGDs from the set of rules
        final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);

        // Convert the query bodies in rules
        final List<IRule> bodies = new LinkedList<IRule>(rules);
        bodies.removeAll(tgds);

        final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads);

        // Get the configuration; a DBConnection directive is mandatory for this test.
        final Map<IPredicate, IRelation> conf = parser.getDirectives();
        if (conf.containsKey(BasicFactory.getInstance().createPredicate("DBConnection", 8))) {
            StorageManager.getInstance();
            StorageManager.configure(conf);
        } else {
            LOGGER.error("Missing DB connection parameters.");
            throw new ConfigurationException("Missing DB connection parameters.");

        }

        // Get the SBox rules from the set of rules
        final List<IRule> sbox = RewritingUtils.getSBoxRules(rules, queryHeads);

        // get the constraints from the set of rules
        final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);

        final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
        LOGGER.info("Expressivity: " + exprs.toString());

        // Only Linear and Sticky fragments are rewritable here; anything else aborts.
        if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY))
            throw new EvaluationException("Only Linear and Sticky TGDs are supported for rewriting.");

        // compute the dependency graph

        LOGGER.debug("Computing position dependencies.");
        // long depGraphMem = MonitoringUtils.getHeapUsage();
        long posDepTime = System.currentTimeMillis();
        Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils
                .computePositionDependencyGraph(tgds);
        posDepTime = System.currentTimeMillis() - posDepTime;

        // Setup caching
        CacheManager.setupCaching();

        // if linear TGDs, compute the atom coverage graph.
        LOGGER.debug("Computing atom coverage graph.");
        long atomCoverGraphTime = System.currentTimeMillis();
        if (exprs.contains(Expressivity.LINEAR)) {
            deps = DepGraphUtils.computeAtomCoverageGraph(deps);
        }
        atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;
        // depGraphMem = MonitoringUtils.getHeapUsage() - depGraphMem;

        // Rewrite negative constraints (monolithic, no subsumption/NC checks) so they
        // can be fed into the main rewriting step below.
        // long ncRewMem = MonitoringUtils.getHeapUsage();
        final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
                RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
        long ncRewTime = System.currentTimeMillis();
        final Set<IRule> rewrittenConstraints = Sets.newHashSet();
        if (!ncCheckStrategy.equals(NCCheck.NONE)) {
            for (final IRule c : constraints) {
                rewrittenConstraints
                        .addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
            }
        }
        ncRewTime = System.currentTimeMillis() - ncRewTime;
        // ncRewMem = ncRewMem - MonitoringUtils.getHeapUsage();
        LOGGER.debug("Finished rewriting constraints.");

        // dump the rewritten constraints:
        File outFile = FileUtils.getFile(outTestDir, testName.concat("_cns.dtg"));
        final FileWriter cnsFW = new FileWriter(outFile);
        IOUtils.writeLines(rewrittenConstraints, IOUtils.LINE_SEPARATOR, cnsFW);
        cnsFW.close();

        // Compute the Rewriting
        final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy,
                ncCheckStrategy);
        for (final IRule q : queries) {

            // Caches are per-query: reset before each rewriting so metrics are comparable.
            CacheManager.setupCaching();

            final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                    .getPredicateSymbol();

            // Setup reporting
            final Reporter rep = Reporter.getInstance(true);
            Reporter.setupReporting();
            Reporter.setQuery(queryPredicate);
            Reporter.setOntology(testName);
            rep.setValue(RewMetric.DEPGRAPH_TIME, posDepTime);

            LOGGER.info("Processing query: ".concat(q.toString()));
            // final long rewMem = MonitoringUtils.getHeapUsage();
            final long overallTime = System.currentTimeMillis();
            final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps, exprs);
            rep.setValue(RewMetric.OVERALL_TIME, System.currentTimeMillis() - overallTime);

            // rep.setValue(RewMetric.REW_MEM, MonitoringUtils.getHeapUsage() - rewMem);
            // rep.setValue(RewMetric.DEPGRAPH_MEM, depGraphMem);
            rep.setValue(RewMetric.REW_SIZE, (long) rewriting.size());
            rep.setValue(RewMetric.JOIN_COUNT, RewritingUtils.joinCount(rewriting));
            rep.setValue(RewMetric.ATOM_COUNT, RewritingUtils.atomsCount(rewriting));
            rep.setValue(RewMetric.REW_CNS_COUNT, (long) rewrittenConstraints.size());
            rep.setValue(RewMetric.REW_CNS_TIME, ncRewTime);
            // rep.setValue(RewMetric.REW_CNS_MEM, ncRewMem);

            // Other metrics
            rep.setValue(RewMetric.OVERHEAD_TIME,
                    rep.getValue(RewMetric.OVERALL_TIME) - rep.getValue(RewMetric.REW_TIME));

            // Caching size metrics
            rep.setValue(RewMetric.MAX_COVERING_CACHE_SIZE, CoveringCache.getCache().size(CacheType.COVERING));
            rep.setValue(RewMetric.MAX_NON_COVERING_CACHE_SIZE,
                    CoveringCache.getCache().size(CacheType.NOT_COVERING));
            rep.setValue(RewMetric.MAX_MAPSTO_CACHE_SIZE, MapsToCache.size(MapsToCache.CacheType.MAPSTO));
            rep.setValue(RewMetric.MAX_NOT_MAPSTO_CACHE_SIZE,
                    MapsToCache.size(MapsToCache.CacheType.NOT_MAPSTO));
            rep.setValue(RewMetric.MAX_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_NON_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_RENAMING_CACHE_SIZE, RenamingCache.size());
            rep.setValue(RewMetric.MAX_MGU_CACHE_SIZE, MGUCache.size());

            // Create a file to store the rewriting results.

            outFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_rew.dtg"));
            final FileWriter rewFW = new FileWriter(outFile);

            rewFW.write("/// Query: " + q + "///\n");
            // BUGFIX: the ontology header line was missing its trailing newline, which
            // glued the "Created on" line onto it in the dumped report.
            rewFW.write("/// Ontology: " + testName + "///\n");
            rewFW.write("/// Created on: " + creationDate + " ///\n");
            rewFW.write("/// Rules in the program: " + rules.size() + " ///\n");
            rewFW.write("/// TGDs in the program: " + tgds.size() + " ///\n");
            rewFW.write("/// Constraints in the program: " + constraints.size() + " ///\n");
            rewFW.write("/// Theory expressivity: " + exprs.toString() + " ///\n");
            rewFW.write("/// Decomposition: " + decomposition.name() + " ///\n");
            rewFW.write("/// Subsumption Check Strategy: " + subchkStrategy.name() + " ///\n");
            rewFW.write("/// Negative Constraints Check Strategy: " + ncCheckStrategy.name() + " ///\n");
            rewFW.write(IOUtils.LINE_SEPARATOR);

            LOGGER.info("Writing the output at: " + outFile.getAbsolutePath());

            // dump metrics for individual queries.
            rewFW.write(rep.getReport());

            rewFW.write(IOUtils.LINE_SEPARATOR);
            rewFW.write(IOUtils.LINE_SEPARATOR);

            rewFW.write("/// Rewritten Program ///\n");
            final Set<ILiteral> newHeads = new HashSet<ILiteral>();
            for (final IRule qr : rewriting) {
                newHeads.add(qr.getHead().iterator().next());
                rewFW.write(qr + "\n");
            }
            rewFW.write("\n");
            for (final ILiteral h : newHeads) {
                rewFW.write("?- " + h + ".\n");
            }
            rewFW.write("\n");
            rewFW.flush();
            rewFW.close();

            // dump summary metrics.
            sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
            timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
            cacheSummaryWriter.writeNext(rep.getSummaryCacheMetrics());
            memorySummaryWriter.writeNext(rep.getSummaryMemoryMetrics());
            sizeSummaryWriter.flush();
            timeSummaryWriter.flush();
            cacheSummaryWriter.flush();
            memorySummaryWriter.flush();

            if (!sbox.isEmpty()) {

                // Produce the rewriting according to the Storage Box
                final IQueryRewriter ndmRewriter = new NDMRewriter(sbox);
                // final Set<ILiteral> newHeads = new HashSet<ILiteral>();
                final Set<IRule> sboxRew = new LinkedHashSet<IRule>();
                for (final IRule r : rewriting) {
                    // Create a file to store the rewriting results as Datalog Rules
                    LOGGER.debug("-- Processing rewriting: " + r);
                    sboxRew.addAll(ndmRewriter.getRewriting(r));
                }

                // dump the rewritten sbox rewriting:
                final File sboxFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_sbox_rew.dtg"));
                final FileWriter sboxFW = new FileWriter(sboxFile);
                IOUtils.writeLines(sboxRew, IOUtils.LINE_SEPARATOR, sboxFW);
                sboxFW.close();

                // Produce a SQL rewriting
                final SQLRewriter sqlRewriter = new SQLRewriter(sboxRew);
                final String sqlRew = sqlRewriter.getUCQSQLRewriting("", 1000, 0);
                final File sqlFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_rew.sql"));
                final FileWriter sqlFW = new FileWriter(sqlFile);
                IOUtils.write(sqlRew, sqlFW);
                sqlFW.close();

                // Execute the SQL rewriting
                LOGGER.info("Executing SQL Rewriting");

                long duration = System.nanoTime();
                final IRelation result = StorageManager.executeQuery(sqlRew);
                duration = (System.nanoTime() - duration) / 1000000;
                LOGGER.info(result.size() + " tuples in " + duration + " [ms]\n");
            }
        }
    }
    sizeSummaryWriter.close();
    timeSummaryWriter.close();
    cacheSummaryWriter.close();
    memorySummaryWriter.close();

}