Example usage for java.io PrintStream close

Introduction

This page collects usage examples for java.io.PrintStream.close().

Prototype

public void close() 

Document

Closes the stream. This is done by flushing the stream and then closing the underlying output stream.
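
Before the listings, here is a minimal, self-contained sketch (not taken from any of the source files below; the file name "example-output.txt" is an arbitrary placeholder) of the pattern most of the examples follow: close a PrintStream you opened yourself, and leave System.out open. With try-with-resources, close() is invoked automatically and flushes buffered output before the underlying file handle is released.

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintStream;

public class PrintStreamCloseExample {
    public static void main(String[] args) throws FileNotFoundException {
        // Opened here, so it must be closed; try-with-resources calls close() automatically.
        try (PrintStream out = new PrintStream(new File("example-output.txt"))) {
            out.println("hello");
        }

        // Not opened here, so it is not closed: closing System.out would silence all
        // later console output (compare the "output != System.out" check in the first
        // example below).
        PrintStream console = System.out;
        console.println("done");
    }
}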

Usage

From source file:iDynoOptimizer.MOEAFramework26.src.org.moeaframework.analysis.sensitivity.Analysis.java

@Override
public void run(CommandLine commandLine) throws Exception {
    PrintStream output = null;

    //parse required parameters
    parameterFile = new ParameterFile(new File(commandLine.getOptionValue("parameterFile")));
    parameters = loadParameters(new File(commandLine.getOptionValue("parameters")));
    metric = Integer.parseInt(commandLine.getOptionValue("metric"));

    //parse optional parameters
    if (commandLine.hasOption("band")) {
        bandWidth = Integer.parseInt(commandLine.getOptionValue("band"));
    }

    if (commandLine.hasOption("threshold")) {
        threshold = Double.parseDouble(commandLine.getOptionValue("threshold"));
    }

    //if analyzing hypervolume, require the hypervolume option
    if (metric == 0) {
        if (commandLine.hasOption("hypervolume")) {
            threshold *= Double.parseDouble(commandLine.getOptionValue("hypervolume"));
        } else {
            throw new MissingOptionException("requires hypervolume option");
        }
    }

    try {
        //setup the output stream
        if (commandLine.hasOption("output")) {
            output = new PrintStream(new File(commandLine.getOptionValue("output")));
        } else {
            output = System.out;
        }

        //process all the files listed on the command line
        String[] filenames = commandLine.getArgs();

        for (int i = 0; i < filenames.length; i++) {
            if (i > 0) {
                output.println();
            }

            metrics = loadMetrics(new File(filenames[i]));

            output.print(filenames[i]);
            output.println(":");
            output.print("  Best: ");
            output.println(calculateBest());
            output.print("  Attainment: ");
            output.println(calculateAttainment());

            if (commandLine.hasOption("controllability")) {
                output.print("  Controllability: ");
                output.println(calculateControllability());
            }

            if (commandLine.hasOption("efficiency")) {
                output.print("  Efficiency: ");
                output.println(calculateEfficiency());
            }
        }
    } finally {
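        // close the output only if it was opened here; System.out must stay usable afterwards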
        if ((output != null) && (output != System.out)) {
            output.close();
        }
    }
}

From source file:ca.psiphon.PsiphonTunnel.java

private String setupTrustedCertificates(Context context) throws Exception {

    // Copy the Android system CA store to a local, private cert bundle file.
    //
    // This results in a file that can be passed to SSL_CTX_load_verify_locations
    // for use with OpenSSL modes in tunnel-core.
    // https://www.openssl.org/docs/manmaster/ssl/SSL_CTX_load_verify_locations.html
    //
    // TODO: to use the path mode of load_verify_locations would require emulating
    // the filename scheme used by c_rehash:
    // https://www.openssl.org/docs/manmaster/apps/c_rehash.html
    // http://stackoverflow.com/questions/19237167/the-new-subject-hash-openssl-algorithm-differs

    File directory = context.getDir("PsiphonCAStore", Context.MODE_PRIVATE);

    final String errorMessage = "copy AndroidCAStore failed";
    try {

        File file = new File(directory, "certs.dat");

        // Pave a fresh copy on every run, which ensures we're not using old certs.
        // Note: assumes KeyStore doesn't return revoked certs.
        //
        // TODO: this takes under 1 second, but should we avoid repaving every time?
        file.delete();

        PrintStream output = null;
        try {
            output = new PrintStream(new FileOutputStream(file));

            KeyStore keyStore;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
                keyStore = KeyStore.getInstance("AndroidCAStore");
                keyStore.load(null, null);
            } else {
                keyStore = KeyStore.getInstance("BKS");
                FileInputStream inputStream = new FileInputStream("/etc/security/cacerts.bks");
                try {
                    keyStore.load(inputStream, "changeit".toCharArray());
                } finally {
                    if (inputStream != null) {
                        inputStream.close();
                    }
                }
            }

            Enumeration<String> aliases = keyStore.aliases();
            while (aliases.hasMoreElements()) {
                String alias = aliases.nextElement();
                X509Certificate cert = (X509Certificate) keyStore.getCertificate(alias);

                output.println("-----BEGIN CERTIFICATE-----");
                String pemCert = new String(Base64.encode(cert.getEncoded(), Base64.NO_WRAP), "UTF-8");
                // OpenSSL appears to reject the default linebreaking done by Base64.encode,
                // so we manually linebreak every 64 characters
                for (int i = 0; i < pemCert.length(); i += 64) {
                    output.println(pemCert.substring(i, Math.min(i + 64, pemCert.length())));
                }
                output.println("-----END CERTIFICATE-----");
            }

            mHostService.onDiagnosticMessage("prepared PsiphonCAStore");

            return file.getAbsolutePath();

        } finally {
            if (output != null) {
                output.close();
            }
        }

    } catch (KeyStoreException e) {
        throw new Exception(errorMessage, e);
    } catch (NoSuchAlgorithmException e) {
        throw new Exception(errorMessage, e);
    } catch (CertificateException e) {
        throw new Exception(errorMessage, e);
    } catch (IOException e) {
        throw new Exception(errorMessage, e);
    }
}

From source file:org.apache.asterix.external.classad.test.ClassAdToADMTest.java

@SuppressWarnings("rawtypes")
public void testSchemaful() {
    try {
        File file = new File("target/classad-wtih-temporals.adm");
        File expected = new File(
                getClass().getResource("/results/classad-with-temporals.adm").toURI().getPath());
        FileUtils.deleteQuietly(file);
        PrintStream printStream = new PrintStream(Files.newOutputStream(Paths.get(file.toURI())));
        String[] recordFieldNames = { "GlobalJobId", "Owner", "ClusterId", "ProcId", "RemoteWallClockTime",
                "CompletionDate", "QDate", "JobCurrentStartDate", "JobStartDate",
                "JobCurrentStartExecutingDate" };
        IAType[] recordFieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.AINT32,
                BuiltinType.AINT32, BuiltinType.ADURATION, BuiltinType.ADATETIME, BuiltinType.ADATETIME,
                BuiltinType.ADATETIME, BuiltinType.ADATETIME, BuiltinType.ADATETIME };
        ARecordType recordType = new ARecordType("value", recordFieldNames, recordFieldTypes, true);
        int numOfTupleFields = 1;
        ISerializerDeserializer[] serdes = new ISerializerDeserializer[1];
        serdes[0] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(recordType);
        IPrinterFactory[] printerFactories = new IPrinterFactory[1];
        printerFactories[0] = AqlADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(recordType);
        // create output descriptor
        IPrinter[] printers = new IPrinter[printerFactories.length];
        for (int i = 0; i < printerFactories.length; i++) {
            printers[i] = printerFactories[i].createPrinter();
        }
        ClassAdObjectPool objectPool = new ClassAdObjectPool();
        String[] files = new String[] { "/classad-with-temporals.classads" };
        ClassAdParser parser = new ClassAdParser(recordType, false, false, false, null, null, null, objectPool);
        ArrayTupleBuilder tb = new ArrayTupleBuilder(numOfTupleFields);
        for (String path : files) {
            List<Path> paths = new ArrayList<>();
            paths.add(Paths.get(getClass().getResource(path).toURI()));
            FileSystemWatcher watcher = new FileSystemWatcher(paths, null, false);
            LocalFSInputStream in = new LocalFSInputStream(watcher);
            SemiStructuredRecordReader recordReader = new SemiStructuredRecordReader(in, "[", "]");
            while (recordReader.hasNext()) {
                tb.reset();
                IRawRecord<char[]> record = recordReader.next();
                parser.parse(record, tb.getDataOutput());
                tb.addFieldEndOffset();
                printTuple(tb, printers, printStream);
            }
            recordReader.close();
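            // closing the print stream flushes it, so the content comparison below sees the full output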
            printStream.close();
            Assert.assertTrue(FileUtils.contentEquals(file, expected));
        }
    } catch (Throwable th) {
        System.err.println("TEST FAILED");
        th.printStackTrace();
        Assert.assertTrue(false);
    }
    System.err.println("TEST PASSED");
}

From source file:de.juwimm.cms.remote.ContentServiceSpringImpl.java

/**
 * Creates a new Unit-Edition for the active site and returns it as SOAP-Attachment.
 *
 * @throws UserException
 * 
 * @see de.juwimm.cms.remote.ContentServiceSpring#exportEditionUnit(java.lang.Integer)
 */
@Override
protected InputStream handleExportEditionUnit(Integer rootViewComponentId) throws Exception {
    try {
        File fle = File.createTempFile("edition_unit_export", ".xml.gz");
        FileOutputStream fout = new FileOutputStream(fle);
        GZIPOutputStream gzoudt = new GZIPOutputStream(fout);
        PrintStream out = new PrintStream(gzoudt, true, "UTF-8");
        EditionHbm edition = super.getEditionHbmDao().create("RETURNEDITION", rootViewComponentId, out, true);
        super.getEditionHbmDao().remove(edition);
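        // closing the PrintStream also closes the wrapped GZIPOutputStream, which finishes the gzip data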
        out.flush();
        out.close();
        out = null;
        return new FileInputStream(fle);
    } catch (Exception e) {
        log.error("Could not export edition unit", e);
        throw new UserException(e.getMessage());
    }
}

From source file:com.netscape.cms.servlet.csadmin.ConfigurationUtils.java

public static String submitAdminCertRequest(String ca_hostname, int ca_port, String profileId,
        String certRequestType, String certRequest, String subjectDN) throws Exception {

    logger.debug("ConfigurationUtils: submitAdminCertRequest()");

    IConfigStore config = CMS.getConfigStore();

    if (profileId == null) {
        profileId = config.getString("preop.admincert.profile", "caAdminCert");
    }

    String session_id = CMS.getConfigSDSessionId();

    MultivaluedMap<String, String> content = new MultivaluedHashMap<String, String>();
    content.putSingle("profileId", profileId);
    content.putSingle("cert_request_type", certRequestType);
    content.putSingle("cert_request", certRequest);
    content.putSingle("xmlOutput", "true");
    content.putSingle("sessionID", session_id);
    content.putSingle("subject", subjectDN);

    String c = post(ca_hostname, ca_port, true, "/ca/ee/ca/profileSubmit", content, null, null);

    // retrieve the request Id and admin certificate
    if (c != null) {
        ByteArrayInputStream bis = new ByteArrayInputStream(c.getBytes());
        XMLObject parser = new XMLObject(bis);

        String status = parser.getValue("Status");
        logger.debug("submitAdminXertRequest: status=" + status);
        if (status.equals(AUTH_FAILURE)) {
            throw new EAuthException("Unable to generate admin certificate: authentication failure");

        } else if (!status.equals(SUCCESS)) {
            String error = parser.getValue("Error");
            logger.error("Error: " + error);
            throw new IOException("Unable to generate admin certificate: " + error);
        }

        IConfigStore cs = CMS.getConfigStore();
        String id = parser.getValue("Id");

        cs.putString("preop.admincert.requestId.0", id);
        String serial = parser.getValue("serialno");

        cs.putString("preop.admincert.serialno.0", serial);
        String b64 = parser.getValue("b64");

        // save in a file for access by ImportAdminCertPanel
        String instanceRoot = cs.getString("instanceRoot", "");
        String dir = instanceRoot + File.separator + "conf" + File.separator + "admin.b64";
        cs.putString("preop.admincert.b64", dir);

        PrintStream ps = new PrintStream(dir, "UTF-8");
        ps.println(b64);
        ps.flush();
        ps.close();

        return b64;
    } else {
        throw new IOException("submitAdminCertRequest: Failed to get response from ca");
    }
}

From source file:com.moscona.dataSpace.persistence.DirectoryDataStore.java

@Override
public void dumpDataSpaceSummary(DataSpace dataSpace) throws FileNotFoundException, DataSpaceException {
    stats.startTimerFor("dumpDataSpaceSummary");
    PrintStream out = new PrintStream(new FileOutputStream(dataSpaceSummaryFileName()));
    try {
        out.println("Persistent name space");
        INameSpace ns = dataSpace.getPersistentNameSpace();
        List<String> vars = ns.getAssignedVariableNames();
        HashSet<IDataElement> visited = new HashSet<IDataElement>();

        out.println();
        out.println("List of variables:");
        out.println();
        for (String var : vars) {
            IDataElement element = ns.get(var);
            out.println("  " + var + ": " + element.getClass().getSimpleName() + "  Description: "
                    + element.getDescription());
        }

        out.println();
        out.println();
        out.println("Details:");
        out.println();
        out.println();

        for (String var : vars) {
            IDataElement element = ns.get(var);
            visited.add(element);
            out.println();
            out.println("Variable: \"" + var + "\": " + element.getClass().getSimpleName());
            out.println("  Description: " + element.getDescription());
            if (IScalar.class.isAssignableFrom(element.getClass())) {
                dumpSummary((IScalar) element, out);
            } else if (element instanceof AbstractVector) {
                dumpSummary((AbstractVector) element, out, " ");
            } else if (element instanceof DataFrame) {
                dumpSummary((DataFrame) element, out, visited);
            } else {
                out.println("  no summary in for available for " + element.getClass());
            }
        }
    } finally {
        try {
            stats.stopTimerFor("dumpDataSpaceSummary");
        } catch (InvalidStateException e) {
            // do nothing
        }
        out.close();
    }
}

From source file:info.mikaelsvensson.devtools.analysis.localaccesslog.LocalAccessLogReportGenerator.java

@Override
public void generateReport(File outputFile, ReportPrinter reportPrinter) throws FileNotFoundException {
    final PrintStream ps = new PrintStream(outputFile);
    final Collection<LocalAccessLogSample> allSamples = _log.getSamples();
    Map<String, Collection<LocalAccessLogSample>> samplesByTestSession = SampleCollector.COLLECTOR_BY_SESSION_DATE
            .getFilteredAndGrouped(allSamples);
    for (Map.Entry<String, Collection<LocalAccessLogSample>> sessionEntry : samplesByTestSession.entrySet()) {
        final Collection<LocalAccessLogSample> sessionSamples = sessionEntry.getValue();
        Map<String, Collection<LocalAccessLogSample>> samples = SAMPLE_COLLECTOR
                .getFilteredAndGrouped(sessionSamples);
        String[][] data = new String[samples.size() + 1][];
        int i = 0;
        int sumCount = 0;
        final DefaultPieDataset dataset = new DefaultPieDataset();
        final JFreeChart chart = ChartFactory.createPieChart(
                "Status Codes For Session " + sessionEntry.getKey(), dataset, true, false, Locale.ENGLISH);
        final File chartFile = new File(outputFile.getAbsolutePath() + "."
                + StringUtils.remove(sessionEntry.getKey(), ':').replace(' ', '-') + ".png");
        final PiePlot plot = (PiePlot) chart.getPlot();
        for (Map.Entry<String, Collection<LocalAccessLogSample>> entry : samples.entrySet()) {
            final Collection<LocalAccessLogSample> responseCodeSamples = entry.getValue();
            final int count = responseCodeSamples.size();
            data[i++] = new String[] { entry.getKey(), ToStringUtil.toString(count),
                    ToStringUtil.toString(_log.calculateAverage(responseCodeSamples)),
                    ToStringUtil.toString(_log.calculateMin(responseCodeSamples)),
                    ToStringUtil.toString(_log.calculateMax(responseCodeSamples)) };
            sumCount += count;

            final String label = entry.getKey() + " (" + count + " reqs)";
            dataset.setValue(label, count);
            plot.setSectionPaint(label, entry.getKey().equals("200") ? Color.GREEN : Color.RED);
        }
        data[i] = new String[] { "All", ToStringUtil.toString(sumCount),
                ToStringUtil.toString(_log.calculateAverage(sessionSamples)),
                ToStringUtil.toString(_log.calculateMin(sessionSamples)),
                ToStringUtil.toString(_log.calculateMax(sessionSamples)) };

        reportPrinter.printTable(ps, sessionEntry.getKey(), 10,
                new String[] { "Status Code", "# Requests", "Avg [ms]", "Min [ms]", "Max [ms]" }, data, null);

        if (sumCount > NUMBER_OF_REQUESTS_IN_SHORT_TEST) {
            try {
                ChartUtilities.saveChartAsPNG(chartFile, chart, 500, 500);
            } catch (IOException e) {
                e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
            }
        }
    }
    ps.close();
}

From source file:edu.ku.brc.dbsupport.ImportExportDB.java

/**
     * Writes the records of the given table to an XML file named after the table.
     * @param dataBase the class name of the table
     */
    @SuppressWarnings("unchecked")
    public void writeXMLfile(String dataBase) {
        FileOutputStream fout;

        Session dom4jSession = session.getSession(EntityMode.DOM4J);
        String query = "from " + dataBase + " where id = 1"; //$NON-NLS-1$ //$NON-NLS-2$

        System.out.println(query);

        List userXML = dom4jSession.createQuery(query).list();
        try {
            fout = new FileOutputStream(importFolderPath + dataBase + ".xml"); //$NON-NLS-1$
            PrintStream p = new PrintStream(fout);
            p.print("<root>"); //$NON-NLS-1$
            OutputFormat format = OutputFormat.createPrettyPrint();
            XMLWriter writer = new XMLWriter(fout, format);

            for (int i = 0; i < userXML.size(); i++) {
                Element writeMe = (Element) userXML.get(i);
                writer.write(writeMe);
            }
            p.println("\n</root>"); //$NON-NLS-1$
            p.close();
            fout.close();
            writer.close();
            System.out.println("Wrote: " + dataBase + ".xml"); //$NON-NLS-1$ //$NON-NLS-2$
        } catch (Exception ex) {
            edu.ku.brc.af.core.UsageTracker.incrHandledUsageCount();
            edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(ImportExportDB.class, ex);
            ex.printStackTrace();
        }

    }

From source file:edu.ku.brc.dbsupport.ImportExportDB.java

/**
     * Writes a single record of the given table to an XML file.
     * @param dbTable the class name of the table
     * @param id the id number of the record
     */
    @SuppressWarnings("unchecked")
    public void writeSingleRecordXML(String dbTable, int id) {
        FileOutputStream fout;

        Session dom4jSession = session.getSession(EntityMode.DOM4J);
        // load the object by using its primary key
        DBTableInfo info = DBTableIdMgr.getInstance().getInfoByTableName(dbTable.toLowerCase());
        String primaryKey = info.getPrimaryKeyName();
        String query = "from " + dbTable + " where " + primaryKey + " = " + id; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$

        List userXML = dom4jSession.createQuery(query).list();

        try {
            fout = new FileOutputStream(importFolderPath + dbTable + ".xml"); //$NON-NLS-1$
            PrintStream p = new PrintStream(fout);
            p.print("<root>"); //$NON-NLS-1$
            OutputFormat format = OutputFormat.createPrettyPrint();
            XMLWriter writer = new XMLWriter(fout, format);

            for (int i = 0; i < userXML.size(); i++) {
                Element writeMe = (Element) userXML.get(i);
                writer.write(writeMe);
            }
            p.println("\n</root>"); //$NON-NLS-1$
            p.close();
            fout.close();
            writer.close();
            System.out.println("Wrote: " + dbTable + ".xml"); //$NON-NLS-1$ //$NON-NLS-2$
        } catch (Exception ex) {
            edu.ku.brc.af.core.UsageTracker.incrHandledUsageCount();
            edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(ImportExportDB.class, ex);
            ex.printStackTrace();
        }
        System.out.println();
    }

From source file:mvm.rya.accumulo.mr.fileinput.BulkNtripsInputTool.java

@Override
public int run(final String[] args) throws Exception {
    final Configuration conf = getConf();
    try {
        //conf
        zk = conf.get(MRUtils.AC_ZK_PROP, zk);
        ttl = conf.get(MRUtils.AC_TTL_PROP, ttl);
        instance = conf.get(MRUtils.AC_INSTANCE_PROP, instance);
        userName = conf.get(MRUtils.AC_USERNAME_PROP, userName);
        pwd = conf.get(MRUtils.AC_PWD_PROP, pwd);
        workDirBase = conf.get(WORKDIR_PROP, workDirBase);
        format = conf.get(MRUtils.FORMAT_PROP, format);
        conf.set(MRUtils.FORMAT_PROP, format);
        final String inputDir = args[0];

        ZooKeeperInstance zooKeeperInstance = new ZooKeeperInstance(instance, zk);
        Connector connector = zooKeeperInstance.getConnector(userName, new PasswordToken(pwd));
        TableOperations tableOperations = connector.tableOperations();

        if (conf.get(AccumuloRdfConfiguration.CONF_ADDITIONAL_INDEXERS) != null) {
            throw new IllegalArgumentException("Cannot use Bulk N Trips tool with Additional Indexers");
        }

        String tablePrefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, null);
        if (tablePrefix != null)
            RdfCloudTripleStoreConstants.prefixTables(tablePrefix);
        String[] tables = { tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX,
                tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX,
                tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX };
        Collection<Job> jobs = new ArrayList<Job>();
        for (final String tableName : tables) {
            PrintStream out = null;
            try {
                String workDir = workDirBase + "/" + tableName;
                System.out.println("Loading data into table[" + tableName + "]");

                Job job = new Job(new Configuration(conf),
                        "Bulk Ingest load data to Generic RDF Table[" + tableName + "]");
                job.setJarByClass(this.getClass());
                //setting long job
                Configuration jobConf = job.getConfiguration();
                jobConf.setBoolean("mapred.map.tasks.speculative.execution", false);
                jobConf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
                jobConf.set("io.sort.mb", jobConf.get("io.sort.mb", "256"));
                jobConf.setBoolean("mapred.compress.map.output", true);
                //                    jobConf.set("mapred.map.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); //TODO: I would like LZO compression

                job.setInputFormatClass(TextInputFormat.class);

                job.setMapperClass(ParseNtripsMapper.class);
                job.setMapOutputKeyClass(Key.class);
                job.setMapOutputValueClass(Value.class);

                job.setCombinerClass(OutStmtMutationsReducer.class);
                job.setReducerClass(OutStmtMutationsReducer.class);
                job.setOutputFormatClass(AccumuloFileOutputFormat.class);
                // AccumuloFileOutputFormat.setZooKeeperInstance(jobConf, instance, zk);

                jobConf.set(ParseNtripsMapper.TABLE_PROPERTY, tableName);

                TextInputFormat.setInputPaths(job, new Path(inputDir));

                FileSystem fs = FileSystem.get(conf);
                Path workPath = new Path(workDir);
                if (fs.exists(workPath))
                    fs.delete(workPath, true);

                //make failures dir
                Path failures = new Path(workDir, "failures");
                fs.delete(failures, true);
                fs.mkdirs(new Path(workDir, "failures"));

                AccumuloFileOutputFormat.setOutputPath(job, new Path(workDir + "/files"));

                out = new PrintStream(new BufferedOutputStream(fs.create(new Path(workDir + "/splits.txt"))));

                if (!tableOperations.exists(tableName))
                    tableOperations.create(tableName);
                Collection<Text> splits = tableOperations.getSplits(tableName, Integer.MAX_VALUE);
                for (Text split : splits)
                    out.println(new String(Base64.encodeBase64(TextUtil.getBytes(split))));

                job.setNumReduceTasks(splits.size() + 1);
                out.close();

                job.setPartitionerClass(KeyRangePartitioner.class);
                RangePartitioner.setSplitFile(job, workDir + "/splits.txt");

                jobConf.set(WORKDIR_PROP, workDir);

                job.submit();
                jobs.add(job);

            } catch (Exception re) {
                throw new RuntimeException(re);
            } finally {
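                // close() on an already-closed PrintStream is a no-op, so this is safe even after the close above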
                if (out != null)
                    out.close();
            }
        }

        for (Job job : jobs) {
            while (!job.isComplete()) {
                Thread.sleep(1000);
            }
        }

        for (String tableName : tables) {
            String workDir = workDirBase + "/" + tableName;
            String filesDir = workDir + "/files";
            String failuresDir = workDir + "/failures";

            FileSystem fs = FileSystem.get(conf);

            //make sure that the "accumulo" user can read/write/execute into these directories this path
            fs.setPermission(new Path(filesDir), new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
            fs.setPermission(new Path(failuresDir), new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));

            tableOperations.importDirectory(tableName, filesDir, failuresDir, false);

        }

    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    return 0;
}