Example usage for java.io IOException printStackTrace

List of usage examples for java.io IOException printStackTrace

Introduction

On this page you can find example usage of java.io.IOException.printStackTrace.

Prototype

public void printStackTrace() 

Source Link

Document

Prints this throwable and its backtrace to the standard error stream.

Usage

From source file:ca.uqac.info.monitor.BeepBeepMonitor.java

/**
 * Entry point for the BeepBeep monitor.
 * <p>
 * Parses command-line options, builds one monitor per input formula file,
 * registers them with an {@link EventNotifier}, then feeds the notifier
 * with events read from a trace file, a named pipe, a TCP socket, or
 * standard input. Exits with one of the {@code ERR_*} codes.
 *
 * @param args command-line arguments; see {@code setupOptions()} for the
 *             recognized options
 */
public static void main(String[] args) {
    int verbosity = 1, slowdown = 0, tcp_port = 0;
    boolean show_stats = false, to_stdout = false;
    String trace_filename = "", pipe_filename = "", event_name = "message";
    final MonitorFactory mf = new MonitorFactory();

    // In case we open a socket
    ServerSocket m_serverSocket = null;
    Socket m_connection = null;

    // Parse command line arguments
    Options options = setupOptions();
    CommandLine c_line = setupCommandLine(args, options);
    assert c_line != null;
    if (c_line.hasOption("verbosity")) {
        verbosity = Integer.parseInt(c_line.getOptionValue("verbosity"));
    }
    if (verbosity > 0) {
        showHeader();
    }
    if (c_line.hasOption("version")) {
        System.err.println("(C) 2008-2013 Sylvain Hallé et al., Université du Québec à Chicoutimi");
        System.err.println("This program comes with ABSOLUTELY NO WARRANTY.");
        System.err.println("This is a free software, and you are welcome to redistribute it");
        System.err.println("under certain conditions. See the file COPYING for details.\n");
        System.exit(ERR_OK);
    }
    if (c_line.hasOption("h")) {
        showUsage(options);
        System.exit(ERR_OK);
    }
    // NOTE: a second check for "version" used to follow here; it was
    // unreachable (the branch above already exits) and has been removed.
    if (c_line.hasOption("slowdown")) {
        slowdown = Integer.parseInt(c_line.getOptionValue("slowdown"));
        if (verbosity > 0)
            System.err.println("Slowdown factor: " + slowdown + " ms");
    }
    if (c_line.hasOption("stats")) {
        show_stats = true;
    }
    if (c_line.hasOption("csv")) {
        // Will output data in CSV format to stdout
        to_stdout = true;
    }
    if (c_line.hasOption("eventname")) {
        // Set event name
        event_name = c_line.getOptionValue("eventname");
    }
    if (c_line.hasOption("t")) {
        // Read events from a trace
        trace_filename = c_line.getOptionValue("t");
    }
    if (c_line.hasOption("p")) {
        // Read events from a pipe
        pipe_filename = c_line.getOptionValue("p");
    }
    if (c_line.hasOption("k")) {
        // Read events from a TCP port
        tcp_port = Integer.parseInt(c_line.getOptionValue("k"));
    }
    // Trace file and named pipe are mutually exclusive input sources
    if (!trace_filename.isEmpty() && !pipe_filename.isEmpty()) {
        System.err.println("ERROR: you must specify at most one of trace file or named pipe");
        showUsage(options);
        System.exit(ERR_ARGUMENTS);
    }
    @SuppressWarnings("unchecked")
    List<String> remaining_args = c_line.getArgList();
    if (remaining_args.isEmpty()) {
        System.err.println("ERROR: no input formula specified");
        showUsage(options);
        System.exit(ERR_ARGUMENTS);
    }
    // Instantiate the event notifier; it only prints to the console when
    // verbosity is non-zero
    boolean notify = (verbosity > 0);
    EventNotifier en = new EventNotifier(notify);
    en.m_slowdown = slowdown;
    en.m_csvToStdout = to_stdout;
    // Create one monitor for each input file and add it to the notifier
    for (String formula_filename : remaining_args) {
        try {
            String formula_contents = FileReadWrite.readFile(formula_filename);
            Operator op = Operator.parseFromString(formula_contents);
            op.accept(mf);
            Monitor mon = mf.getMonitor();
            Map<String, String> metadata = getMetadata(formula_contents);
            metadata.put("Filename", formula_filename);
            en.addMonitor(mon, metadata);
        } catch (IOException e) {
            e.printStackTrace();
            System.exit(ERR_IO);
        } catch (Operator.ParseException e) {
            System.err.println("Error parsing input formula");
            System.exit(ERR_PARSE);
        }
    }

    // Open the event source; exactly one of the four branches below runs
    PipeReader pr = null;
    try {
        if (!pipe_filename.isEmpty()) {
            // We tell the pipe reader we read a pipe
            File f = new File(pipe_filename);
            if (verbosity > 0)
                System.err.println("Reading from pipe named " + f.getName());
            pr = new PipeReader(new FileInputStream(f), en, false);
        } else if (!trace_filename.isEmpty()) {
            // We tell the pipe reader we read a regular file
            File f = new File(trace_filename);
            if (verbosity > 0)
                System.err.println("Reading from file " + f.getName());
            pr = new PipeReader(new FileInputStream(f), en, true);
        } else if (tcp_port > 0) {
            // We tell the pipe reader we read from a socket; accept() blocks
            // until a client connects
            if (verbosity > 0)
                System.err.println("Reading from TCP port " + tcp_port);
            m_serverSocket = new ServerSocket(tcp_port);
            m_connection = m_serverSocket.accept();
            pr = new PipeReader(m_connection.getInputStream(), en, false);
        } else {
            // We tell the pipe reader we read from standard input
            if (verbosity > 0)
                System.err.println("Reading from standard input");
            pr = new PipeReader(System.in, en, false);
        }
    } catch (FileNotFoundException ex) {
        // We print both trace and pipe since one of them must be empty
        System.err.println("ERROR: file not found " + trace_filename + pipe_filename);
        System.exit(ERR_IO);
    } catch (IOException e) {
        // Caused by socket error
        e.printStackTrace();
        System.exit(ERR_IO);
    }
    // Events are delimited by XML-style tags built from the event name
    pr.setSeparator("<" + event_name + ">", "</" + event_name + ">");

    // Check parameters for the event notifier
    if (c_line.hasOption("no-trigger")) {
        en.m_notifyOnVerdict = false;
    } else {
        en.m_notifyOnVerdict = true;
    }
    if (c_line.hasOption("mirror")) {
        en.m_mirrorEventsOnStdout = true;
    }

    // Start the reader in its own thread and wait for it to finish
    en.reset();
    Thread th = new Thread(pr);
    long clock_start = System.nanoTime();
    th.start();
    try {
        th.join(); // Wait for thread to finish
    } catch (InterruptedException e1) {
        // Thread is finished
    }
    if (tcp_port > 0 && m_serverSocket != null) {
        // We opened a socket; now we close it
        try {
            m_serverSocket.close();
        } catch (IOException e) {
            // Nothing sensible to do at shutdown; just report it
            e.printStackTrace();
        }
    }
    long clock_end = System.nanoTime();
    int ret_code = pr.getReturnCode();
    switch (ret_code) {
    case PipeReader.ERR_EOF:
        if (verbosity > 0)
            System.err.println("\nEnd of file reached");
        break;
    case PipeReader.ERR_EOT:
        if (verbosity > 0)
            System.err.println("\nEOT received on pipe: closing");
        break;
    case PipeReader.ERR_OK:
        // Do nothing
        break;
    default:
        // An error
        System.err.println("Runtime error");
        System.exit(ERR_RUNTIME);
        break;
    }
    if (show_stats) {
        if (verbosity > 0) {
            System.out.println("Messages:   " + en.m_numEvents);
            System.out.println("Time:       " + (int) (en.m_totalTime / 1000000f) + " ms");
            System.out.println("Clock time: " + (int) ((clock_end - clock_start) / 1000000f) + " ms");
            System.out.println("Max heap:   " + (int) (en.heapSize / 1048576f) + " MB");
        } else {
            // If stats are asked but verbosity = 0, only show time value
            // (both monitor and wall clock)
            System.out.print((int) (en.m_totalTime / 1000000f));
            System.out.print(",");
            System.out.print((int) ((clock_end - clock_start) / 1000000f));
        }
    }
    System.exit(ERR_OK);
}

From source file:ClassFileUtilities.java

/**
 * Program that computes the dependencies between the Batik jars.
 * <p>
 *   Run this from the main Batik distribution directory, after building
 *   the jars.  For every jar file in the batik-xxx/ build directory,
 *   it will determine which other jar files it directly depends on.
 *   The output is lines of the form:
 * </p>
 * <pre>  <i>number</i>,<i>from</i>,<i>to</i></pre>
 * <p>
 *   meaning that the <i>from</i> jar has <i>number</i> class files
 *   that depend on class files in the <i>to</i> jar.
 * </p>
 */
public static void main(String[] args) {
    boolean showFiles = false;
    if (args.length == 1 && args[0].equals("-f")) {
        showFiles = true;
    } else if (args.length != 0) {
        System.err.println("usage: ClassFileUtilities [-f]");
        System.err.println();
        System.err.println("  -f    list files that cause each jar file dependency");
        System.exit(1);
    }

    // Locate the first "batik-*" build directory under the working directory
    File cwd = new File(".");
    File buildDir = null;
    String[] cwdFiles = cwd.list();
    for (int i = 0; i < cwdFiles.length; i++) {
        if (cwdFiles[i].startsWith("batik-")) {
            buildDir = new File(cwdFiles[i]);
            if (!buildDir.isDirectory()) {
                buildDir = null;
            } else {
                break;
            }
        }
    }
    if (buildDir == null || !buildDir.isDirectory()) {
        System.out.println("Directory 'batik-xxx' not found in current directory!");
        return;
    }

    try {
        // Raw maps are kept for compatibility with the legacy helpers:
        // cs maps class names to ClassFile, js maps jar names to Jar.
        Map cs = new HashMap();
        Map js = new HashMap();
        collectJars(buildDir, js, cs);

        // Classpath is the set of all jar files found in the build dir
        Set classpath = new HashSet();
        Iterator i = js.values().iterator();
        while (i.hasNext()) {
            classpath.add(((Jar) i.next()).jarFile);
        }

        // Pass 1: resolve class-level dependencies for every class file
        i = cs.values().iterator();
        while (i.hasNext()) {
            ClassFile fromFile = (ClassFile) i.next();
            Set result = getClassDependencies(fromFile.getInputStream(), classpath, false);
            Iterator j = result.iterator();
            while (j.hasNext()) {
                ClassFile toFile = (ClassFile) cs.get(j.next());
                if (fromFile != toFile && toFile != null) {
                    fromFile.deps.add(toFile);
                }
            }
        }

        // Pass 2: aggregate class-level dependencies into jar-level counts
        i = cs.values().iterator();
        while (i.hasNext()) {
            ClassFile fromFile = (ClassFile) i.next();
            Iterator j = fromFile.deps.iterator();
            while (j.hasNext()) {
                ClassFile toFile = (ClassFile) j.next();
                Jar fromJar = fromFile.jar;
                Jar toJar = toFile.jar;
                // Skip self-references and classes duplicated in the same jar
                if (fromFile.name.equals(toFile.name) || toJar == fromJar
                        || fromJar.files.contains(toFile.name)) {
                    continue;
                }
                Integer n = (Integer) fromJar.deps.get(toJar);
                // Integer.valueOf benefits from the small-value cache and
                // replaces the deprecated new Integer(int) constructor
                if (n == null) {
                    fromJar.deps.put(toJar, Integer.valueOf(1));
                } else {
                    fromJar.deps.put(toJar, Integer.valueOf(n.intValue() + 1));
                }
            }
        }

        // Collect (count, from, to) triples for sorting
        List triples = new ArrayList(10);
        i = js.values().iterator();
        while (i.hasNext()) {
            Jar fromJar = (Jar) i.next();
            Iterator j = fromJar.deps.keySet().iterator();
            while (j.hasNext()) {
                Jar toJar = (Jar) j.next();
                Triple t = new Triple();
                t.from = fromJar;
                t.to = toJar;
                t.count = ((Integer) fromJar.deps.get(toJar)).intValue();
                triples.add(t);
            }
        }
        Collections.sort(triples);

        // Emit one line per jar-to-jar dependency; -f also lists the
        // individual class files responsible for each dependency
        i = triples.iterator();
        while (i.hasNext()) {
            Triple t = (Triple) i.next();
            System.out.println(t.count + "," + t.from.name + "," + t.to.name);
            if (showFiles) {
                Iterator j = t.from.files.iterator();
                while (j.hasNext()) {
                    ClassFile fromFile = (ClassFile) j.next();
                    Iterator k = fromFile.deps.iterator();
                    while (k.hasNext()) {
                        ClassFile toFile = (ClassFile) k.next();
                        if (toFile.jar == t.to && !t.from.files.contains(toFile.name)) {
                            System.out.println("\t" + fromFile.name + " --> " + toFile.name);
                        }
                    }
                }
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:com.openteach.diamond.network.waverider.command.Command.java

/**
 * Demonstrates a heartbeat-command round trip: a SlaveState is Java-serialized
 * into a byte buffer, wrapped into a heartbeat Command, marshalled to a
 * ByteBuffer, unmarshalled back, and the decoded command is printed.
 *
 * @param args unused
 */
public static void main(String[] args) {

    ByteArrayOutputStream byteSink = null;
    ObjectOutputStream objectWriter = null;

    try {
        byteSink = new ByteArrayOutputStream();
        objectWriter = new ObjectOutputStream(byteSink);

        // Build a sample payload and serialize it into the byte sink
        SlaveState state = new SlaveState();
        state.setId(1L);
        state.setIsMasterCandidate(false);
        objectWriter.writeObject(state);
        objectWriter.flush();

        // Wrap the serialized payload into a heartbeat command
        Command heartbeat = CommandFactory.createHeartbeatCommand(ByteBuffer.wrap(byteSink.toByteArray()));

        // Marshal to the wire format and decode it again
        ByteBuffer wire = heartbeat.marshall();
        Command decoded = Command.unmarshall(wire);
        // Decode the payload too, to exercise the full round trip
        SlaveState roundTripped = SlaveState.fromByteBuffer(decoded.getPayLoad());
        System.out.println(decoded.toString());
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        // Best-effort cleanup of both streams
        try {
            if (objectWriter != null) {
                objectWriter.close();
            }
            if (byteSink != null) {
                byteSink.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

From source file:edu.umn.cs.sthadoop.trajectory.TrajectoryOverlap.java

/**
 * Command-line entry point for the trajectory-overlap range query.
 * <p>
 * Validates the input/output paths and the required {@code traj},
 * {@code interval} and {@code shape} parameters, computes the MBR of the
 * query trajectory, resolves the matching spatio-temporal index slices,
 * then runs one range query per (slice, query rectangle) pair — locally in
 * a thread when the input is small, otherwise as a background MapReduce
 * job — and finally prints the query plan, timings and result counts.
 *
 * @param args generic Hadoop options plus the operation parameters
 * @throws Exception on job-submission or completion failures
 */
public static void main(String[] args) throws Exception {

    // Sample invocation kept for reference:
    //      args = new String[8];
    //      args[0] = "/export/scratch/mntgData/geolifeGPS/geolife_Trajectories_1.3/HDFS/index_geolife";
    //      args[1] = "/export/scratch/mntgData/geolifeGPS/geolife_Trajectories_1.3/HDFS/knn-dis-result";
    //      args[2] = "shape:edu.umn.cs.sthadoop.trajectory.GeolifeTrajectory";
    //      args[3] = "interval:2008-05-01,2008-05-30";
    //      args[4] = "time:month";
    //      args[5] = "traj:39.9119983,116.606835;39.9119783,116.6065483;39.9119599,116.6062649;39.9119416,116.6059899;39.9119233,116.6057282;39.9118999,116.6054783;39.9118849,116.6052366;39.9118666,116.6050099;39.91185,116.604775;39.9118299,116.604525;39.9118049,116.6042649;39.91177,116.6040166;39.9117516,116.6037583;39.9117349,116.6035066;39.9117199,116.6032666;39.9117083,116.6030232;39.9117,116.6027566;39.91128,116.5969383;39.9112583,116.5966766;39.9112383,116.5964232;39.9112149,116.5961699;39.9111933,116.5959249;39.9111716,116.5956883";
    //      args[6] = "-overwrite";
    //      args[7] = "-local";//"-no-local";

    final OperationsParams params = new OperationsParams(new GenericOptionsParser(args));

    // Validate path arguments: one path needs a readable input, two need
    // input plus writable output
    final Path[] paths = params.getPaths();
    if (paths.length <= 1 && !params.checkInput()) {
        printUsage();
        System.exit(1);
    }
    if (paths.length >= 2 && !params.checkInputOutput()) {
        printUsage();
        System.exit(1);
    }

    if (params.get("traj") == null) {
        System.err.println("Trajectory query is missing");
        printUsage();
        System.exit(1);
    }

    // Invoke method to compute the trajectory MBR.
    String rectangle = getTrajectoryRectangle(params.get("traj"));
    params.set("rect", rectangle);

    if (params.get("rect") == null) {
        System.err.println("You must provide a Trajectory Query");
        printUsage();
        System.exit(1);
    }

    if (params.get("interval") == null) {
        System.err.println("Temporal range missing");
        printUsage();
        System.exit(1);
    }

    // The shape parameter must be a spatio-temporal point type
    TextSerializable inObj = params.getShape("shape");
    if (!(inObj instanceof STPoint)) {
        LOG.error("Shape is not instance of STPoint");
        printUsage();
        System.exit(1);
    }

    // Get spatio-temporal slices.
    List<Path> STPaths = getIndexedSlices(params);
    final Path outPath = params.getOutputPath();
    final Rectangle[] queryRanges = params.getShapes("rect", new Rectangle());

    // All running jobs; resultsCounts is a Vector because both the local
    // worker threads and the job-polling loop below append to it
    final Vector<Long> resultsCounts = new Vector<Long>();
    Vector<Job> jobs = new Vector<Job>();
    Vector<Thread> threads = new Vector<Thread>();

    long t1 = System.currentTimeMillis();
    for (Path stPath : STPaths) {
        final Path inPath = stPath;
        for (int i = 0; i < queryRanges.length; i++) {
            final OperationsParams queryParams = new OperationsParams(params);
            OperationsParams.setShape(queryParams, "rect", queryRanges[i]);
            if (OperationsParams.isLocal(new JobConf(queryParams), inPath)) {
                // Run in local mode: one worker thread per query range,
                // streaming matches to the per-range output file (if any)
                final Rectangle queryRange = queryRanges[i];
                final Shape shape = queryParams.getShape("shape");
                final Path output = outPath == null ? null
                        : (queryRanges.length == 1 ? outPath : new Path(outPath, String.format("%05d", i)));
                Thread thread = new Thread() {
                    @Override
                    public void run() {
                        FSDataOutputStream outFile = null;
                        final byte[] newLine = System.getProperty("line.separator", "\n").getBytes();
                        try {
                            ResultCollector<Shape> collector = null;
                            if (output != null) {
                                FileSystem outFS = output.getFileSystem(queryParams);
                                final FSDataOutputStream foutFile = outFile = outFS.create(output);
                                // Collector serializes each matching shape
                                // as one text line; synchronized because the
                                // query may deliver results concurrently
                                collector = new ResultCollector<Shape>() {
                                    final Text tempText = new Text2();

                                    @Override
                                    public synchronized void collect(Shape r) {
                                        try {
                                            tempText.clear();
                                            r.toText(tempText);
                                            foutFile.write(tempText.getBytes(), 0, tempText.getLength());
                                            foutFile.write(newLine);
                                        } catch (IOException e) {
                                            e.printStackTrace();
                                        }
                                    }
                                };
                            } else {
                                outFile = null;
                            }
                            long resultCount = rangeQueryLocal(inPath, queryRange, shape, queryParams,
                                    collector);
                            resultsCounts.add(resultCount);
                        } catch (IOException e) {
                            e.printStackTrace();
                        } catch (InterruptedException e) {
                            // NOTE(review): interrupt status is not restored
                            // here — confirm whether that is intentional
                            e.printStackTrace();
                        } finally {
                            try {
                                if (outFile != null)
                                    outFile.close();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                };
                thread.start();
                threads.add(thread);
            } else {
                // Run in MapReduce mode
                Path outTempPath = outPath == null ? null
                        : new Path(outPath, String.format("%05d", i) + "-" + inPath.getName());
                queryParams.setBoolean("background", true);
                Job job = rangeQueryMapReduce(inPath, outTempPath, queryParams);
                jobs.add(job);
            }
        }
    }

    // Drain MapReduce jobs in submission order; abort everything if one fails
    while (!jobs.isEmpty()) {
        Job firstJob = jobs.firstElement();
        firstJob.waitForCompletion(false);
        if (!firstJob.isSuccessful()) {
            System.err.println("Error running job " + firstJob);
            System.err.println("Killing all remaining jobs");
            for (int j = 1; j < jobs.size(); j++)
                jobs.get(j).killJob();
            System.exit(1);
        }
        Counters counters = firstJob.getCounters();
        Counter outputRecordCounter = counters.findCounter(Task.Counter.MAP_OUTPUT_RECORDS);
        resultsCounts.add(outputRecordCounter.getValue());
        jobs.remove(0);
    }
    // Then wait for all local worker threads
    while (!threads.isEmpty()) {
        try {
            Thread thread = threads.firstElement();
            thread.join();
            threads.remove(0);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    long t2 = System.currentTimeMillis();
    System.out.println("QueryPlan:");
    for (Path stPath : STPaths) {
        System.out.println(stPath.getName());
    }
    System.out.println("Time for " + queryRanges.length + " jobs is " + (t2 - t1) + " millis");
    System.out.println("Results counts: " + resultsCounts);
}

From source file:com.xiangzhurui.util.ftp.ServerToServerFTP.java

/**
 * Server-to-server (FXP) FTP transfer example: connects to two FTP servers,
 * puts server 2 in remote passive mode and server 1 in remote active mode
 * pointing at server 2, then transfers {@code file1} from server 1 directly
 * to server 2 as {@code file2}.
 *
 * @param args host1[:port] user1 pass1 file1 host2[:port] user2 pass2 file2
 */
public static void main(String[] args) {
    String server1, username1, password1, file1;
    String server2, username2, password2, file2;
    String[] parts;
    int port1 = 0, port2 = 0;
    FTPClient ftp1, ftp2;
    ProtocolCommandListener listener;

    if (args.length < 8) {
        System.err.println(
                "Usage: com.xzr.practice.util.ftp <host1> <user1> <pass1> <file1> <host2> <user2> <pass2> <file2>");
        System.exit(1);
    }

    // Each host argument may carry an optional ":port" suffix; port 0 means
    // "use the protocol default"
    server1 = args[0];
    parts = server1.split(":");
    if (parts.length == 2) {
        server1 = parts[0];
        port1 = Integer.parseInt(parts[1]);
    }
    username1 = args[1];
    password1 = args[2];
    file1 = args[3];
    server2 = args[4];
    parts = server2.split(":");
    if (parts.length == 2) {
        server2 = parts[0];
        port2 = Integer.parseInt(parts[1]);
    }
    username2 = args[5];
    password2 = args[6];
    file2 = args[7];

    // Echo the FTP protocol conversation of both clients to stdout
    listener = new PrintCommandListener(new PrintWriter(System.out), true);
    ftp1 = new FTPClient();
    ftp1.addProtocolCommandListener(listener);
    ftp2 = new FTPClient();
    ftp2.addProtocolCommandListener(listener);

    // Connect to server 1 and verify the greeting reply code
    try {
        int reply;
        if (port1 > 0) {
            ftp1.connect(server1, port1);
        } else {
            ftp1.connect(server1);
        }
        System.out.println("Connected to " + server1 + ".");

        reply = ftp1.getReplyCode();

        if (!FTPReply.isPositiveCompletion(reply)) {
            ftp1.disconnect();
            System.err.println("FTP server1 refused connection.");
            System.exit(1);
        }
    } catch (IOException e) {
        if (ftp1.isConnected()) {
            try {
                ftp1.disconnect();
            } catch (IOException f) {
                // do nothing
            }
        }
        System.err.println("Could not connect to server1.");
        e.printStackTrace();
        System.exit(1);
    }

    // Connect to server 2 the same way
    try {
        int reply;
        if (port2 > 0) {
            ftp2.connect(server2, port2);
        } else {
            ftp2.connect(server2);
        }
        System.out.println("Connected to " + server2 + ".");

        reply = ftp2.getReplyCode();

        if (!FTPReply.isPositiveCompletion(reply)) {
            ftp2.disconnect();
            System.err.println("FTP server2 refused connection.");
            System.exit(1);
        }
    } catch (IOException e) {
        if (ftp2.isConnected()) {
            try {
                ftp2.disconnect();
            } catch (IOException f) {
                // do nothing
            }
        }
        System.err.println("Could not connect to server2.");
        e.printStackTrace();
        System.exit(1);
    }

    // Labeled block: "break __main" skips straight to the finally cleanup
    // when any step of the transfer fails
    __main: try {
        if (!ftp1.login(username1, password1)) {
            System.err.println("Could not login to " + server1);
            break __main;
        }

        if (!ftp2.login(username2, password2)) {
            System.err.println("Could not login to " + server2);
            break __main;
        }

        // Let's just assume success for now.
        ftp2.enterRemotePassiveMode();

        ftp1.enterRemoteActiveMode(InetAddress.getByName(ftp2.getPassiveHost()), ftp2.getPassivePort());

        // Although you would think the store command should be sent to
        // server2
        // first, in reality, com.xzr.practice.util.ftp servers like wu-ftpd start accepting data
        // connections right after entering passive mode. Additionally, they
        // don't even send the positive preliminary reply until after the
        // transfer is completed (in the case of passive mode transfers).
        // Therefore, calling store first would hang waiting for a
        // preliminary
        // reply.
        if (ftp1.remoteRetrieve(file1) && ftp2.remoteStoreUnique(file2)) {
            // if(ftp1.remoteRetrieve(file1) && ftp2.remoteStore(file2)) {
            // We have to fetch the positive completion reply.
            ftp1.completePendingCommand();
            ftp2.completePendingCommand();
        } else {
            System.err.println("Couldn't initiate transfer.  Check that filenames are valid.");
            break __main;
        }

    } catch (IOException e) {
        e.printStackTrace();
        System.exit(1);
    } finally {
        // Best-effort logout/disconnect of both clients
        try {
            if (ftp1.isConnected()) {
                ftp1.logout();
                ftp1.disconnect();
            }
        } catch (IOException e) {
            // do nothing
        }

        try {
            if (ftp2.isConnected()) {
                ftp2.logout();
                ftp2.disconnect();
            }
        } catch (IOException e) {
            // do nothing
        }
    }
}

From source file:edu.umass.cs.reconfiguration.deprecated.ReconfigurableClient.java

/**
 * Simple test client for the reconfiguration package. Clients only know the
 * set of all reconfigurators, not active replicas for any name. All
 * information about active replicas for a name is obtained from
 * reconfigurators. Any request can be sent to any reconfigurator and it
 * will forward to the appropriate reconfigurator if necessary and relay
 * back the response.
 *
 * @param args
 */
public static void main(String[] args) {
    ReconfigurableClient client = null;
    try {
        /*
         * Client can only send/receive clear text or do server-only
         * authentication
         */
        JSONMessenger<?> messenger = new JSONMessenger<String>((new MessageNIOTransport<String, JSONObject>(
                null, null, new PacketDemultiplexerDefault(), true, ReconfigurationConfig.getClientSSLMode())));
        client = new ReconfigurableClient(ReconfigurationConfig.getReconfiguratorAddresses(), messenger);
        int numRequests = 2;
        String requestValuePrefix = "request_value";
        long nameReqInterArrivalTime = 200;
        long NCReqInterArrivalTime = 1000;
        String initValue = "initial_value";
        int numIterations = 10000;
        boolean testReconfigureRC = true;

        // Each iteration exercises the full name lifecycle with a freshly
        // randomized name (and reconfigurator ID, when enabled below)
        for (int j = 0; j < numIterations; j++) {
            String namePrefix = "name" + (int) (Math.random() * Integer.MAX_VALUE);
            String reconfiguratorID = "RC" + (int) (Math.random() * 64000);
            long t0 = System.currentTimeMillis();

            // /////////////request active replicas////////////////////
            // do/while: resend the request until the expected outcome is
            // observed; this pattern repeats throughout the loop
            t0 = System.currentTimeMillis();
            do
                client.sendRequest(client.makeRequestActiveReplicas(namePrefix));
            while (!client.waitForFailure(namePrefix));
            DelayProfiler.updateDelay("requestActives", t0);

            // active replicas for name initially don't exist
            assert (client.getActiveReplicas() == null || client.getActiveReplicas().isEmpty());
            // ////////////////////////////////////////////////////////

            // ////////////////////create name/////////////////////////
            t0 = System.currentTimeMillis();
            do
                client.sendRequest(client.makeCreateNameRequest(namePrefix, initValue));
            while (!client.waitForSuccess(namePrefix));
            DelayProfiler.updateDelay("createName", t0);
            // ////////////////////////////////////////////////////////

            /*
             * Verify that active replicas for name now exist. The only
             * reason the query is repeated is because it is possible to
             * find the name non-existent briefly if the query is sent to a
             * different reconfigurator that hasn't yet caught up with the
             * creation (but will eventually do so).
             */
            // ////////////////////////////////////////////////////////
            t0 = System.currentTimeMillis();
            do
                client.sendRequest(client.makeRequestActiveReplicas(namePrefix));
            while (!client.waitForSuccess(namePrefix));
            DelayProfiler.updateDelay("requestActives", t0);

            assert (client.getActiveReplicas() != null && !client.getActiveReplicas().isEmpty());
            // ////////////////////////////////////////////////////////

            // ///////send a stream of app requests sequentially///////
            for (int i = 0; i < numRequests; i++) {
                t0 = System.currentTimeMillis();
                do
                    client.sendRequest(client.makeRequest(namePrefix, requestValuePrefix + i));
                while (!client.rcvdAppReply(namePrefix));
                DelayProfiler.updateDelay("appPaxosRequest", t0);
                Thread.sleep(nameReqInterArrivalTime);
            }
            // ////////////////////////////////////////////////////////

            // ////////////////////////////////////////////////////////
            // request current active replicas (possibly reconfigured)
            t0 = System.currentTimeMillis();
            do
                client.sendRequest(client.makeRequestActiveReplicas(namePrefix));
            while (!client.waitForSuccess(namePrefix));
            DelayProfiler.updateDelay("requestActives", t0);
            // ////////////////////////////////////////////////////////

            // ///////////////delete name, retransmit if error////////////
            t0 = System.currentTimeMillis();
            do
                client.sendRequest(client.makeDeleteNameRequest(namePrefix));
            while (!client.waitForSuccess(namePrefix));
            DelayProfiler.updateDelay("deleteName", t0);

            Thread.sleep(nameReqInterArrivalTime);
            // ////////////////////////////////////////////////////////

            // ////////////////////////////////////////////////////////
            // verify that active replicas for name now don't exist. The
            t0 = System.currentTimeMillis();
            do
                client.sendRequest(client.makeRequestActiveReplicas(namePrefix));
            while (!client.waitForFailure(namePrefix));
            DelayProfiler.updateDelay("requestActives", t0);

            assert (client.getActiveReplicas() == null || client.getActiveReplicas().isEmpty());
            // ////////////////////////////////////////////////////////

            if (!testReconfigureRC)
                continue;

            // ////////////////////////////////////////////////////////
            // add RC node; the port below does not matter in this test
            t0 = System.currentTimeMillis();
            // do
            // NOTE: unlike the loops above, the request is sent once and the
            // bare while(...); below only polls for completion (the "do" is
            // deliberately commented out)
            client.sendRequest(new ReconfigureRCNodeConfig<String>(null, reconfiguratorID,
                    new InetSocketAddress(InetAddress.getByName("localhost"), TEST_PORT)));
            while (!client
                    .waitForReconfigureRCSuccess(AbstractReconfiguratorDB.RecordNames.RC_NODES.toString()))
                ;
            DelayProfiler.updateDelay("addReconfigurator", t0);
            // ////////////////////////////////////////////////////////

            Thread.sleep(NCReqInterArrivalTime);

            // //////////////// delete just added RC node//////////////////
            HashSet<String> deleted = new HashSet<String>();
            deleted.add(reconfiguratorID);
            t0 = System.currentTimeMillis();
            // do
            // Same single-send-then-poll pattern as the add above
            client.sendRequest(new ReconfigureRCNodeConfig<String>(null, null, deleted));
            while (!client
                    .waitForReconfigureRCSuccess(AbstractReconfiguratorDB.RecordNames.RC_NODES.toString())) {
            }
            DelayProfiler.updateDelay("removeReconfigurator", t0);
            // ////////////////////////////////////////////////////////

            Thread.sleep(NCReqInterArrivalTime);

            client.log.info("\n\n\n\n==================Successfully completed iteration " + j + ":\n"
                    + DelayProfiler.getStats() + "\n\n\n\n");
        }

        // client.messenger.stop();
    } catch (IOException ioe) {
        ioe.printStackTrace();
    } catch (JSONException je) {
        je.printStackTrace();
    } catch (InterruptedException ie) {
        // NOTE(review): interrupt status is not restored here — confirm
        // whether that matters for callers of this test main
        ie.printStackTrace();
    } catch (RequestParseException e) {
        e.printStackTrace();
    }
}

From source file:com.icesoft.faces.webapp.parser.TagToComponentMap.java

/**
 * Main method for when this class is run to build the serialized data from
 * a set of TLDS.//from www . j  a va2  s . c o  m
 *
 * @param args The runtime arguements.
 */
public static void main(String args[]) {

    /* args[0] is "new" to create serialized data, "old" to read serialized
       data back, or "facelets" to generate a Facelets taglib.xml;
       args[1] is the filename for the serialized data (or the taglib.xml);
       args[2...] are the TLDs to process. */

    TagToComponentMap map = new TagToComponentMap();

    if (args[0].equals("new")) {
        // Build a new component map from the TLDs and serialize it.

        for (int i = 2; i < args.length; i++) {
            // try-with-resources closes each TLD stream even when parsing
            // fails (the original leaked the stream on every path).
            try (FileInputStream tldFile = new FileInputStream(args[i])) {
                map.addTagAttrib((InputStream) tldFile);
            } catch (IOException e) {
                e.printStackTrace();
                return;
            }
        }

        try (FileOutputStream fos = new FileOutputStream(args[1]);
                ObjectOutputStream oos = new ObjectOutputStream(fos)) {
            oos.writeObject(map);
            oos.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    } else if (args[0].equals("old")) {
        // Rebuild the component map from previously serialized data.
        // Narrow multi-catch instead of the original blanket Exception.
        try (FileInputStream fis = new FileInputStream(args[1]);
                ObjectInputStream ois = new ObjectInputStream(fis)) {
            map = (TagToComponentMap) ois.readObject();
        } catch (IOException | ClassNotFoundException e) {
            e.printStackTrace();
        }
    } else if (args[0].equals("facelets")) {
        // Build new component map from tld, and use that to
        //  generate a Facelets taglib.xml
        // args[0] is command
        // args[1] is output taglib.xml
        // args[2] is input tld

        try (FileWriter faceletsTaglibXmlWriter = new FileWriter(args[1]);
                FileInputStream tldFile = new FileInputStream(args[2])) {
            String preamble = "<?xml version=\"1.0\"?>\n"
                    + "<facelet-taglib xmlns=\"http://java.sun.com/xml/ns/javaee\"\n"
                    + "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n"
                    + "xsi:schemaLocation=\"http://java.sun.com/xml/ns/javaee "
                    + "http://java.sun.com/xml/ns/javaee/web-facelettaglibrary_2_0.xsd\"\n"
                    + "version=\"2.0\">\n";

            String trailer = "</facelet-taglib>\n";
            faceletsTaglibXmlWriter.write(preamble);

            map.setFaceletsTaglibXmlWriter(faceletsTaglibXmlWriter);
            map.addTagAttrib((InputStream) tldFile);

            faceletsTaglibXmlWriter.write(trailer);
            faceletsTaglibXmlWriter.flush();
        } catch (IOException e) {
            e.printStackTrace();
            return;
        }
    }
}

From source file:de.uni_koblenz.ist.utilities.license_header.LicenseHeader.java

/**
 * Command-line entry point: applies (or removes) a license header to the
 * input named by the -i option, using the license text from -l. The -r
 * and -V flags are forwarded to the {@code LicenseHeader} constructor.
 *
 * @param args the command line arguments, parsed by
 *             {@code processCommandLineOptions}
 */
public static void main(String[] args) {
    CommandLine cl = processCommandLineOptions(args);
    assert cl.hasOption('i');
    assert cl.hasOption('l');
    LicenseHeader lh = new LicenseHeader(cl.getOptionValue('i'), cl.getOptionValue('l'), cl.hasOption('r'),
            cl.hasOption('V'));
    try {
        lh.process();
    } catch (IOException e) {
        // Report the failure and exit non-zero so scripted callers can
        // detect it (the original fell through and exited with status 0).
        e.printStackTrace();
        System.exit(1);
    }
}

From source file:com.glaf.core.util.ZipUtils.java

/**
 * Round-trip demo: Snappy-compresses the given file into "output", then
 * reads "output" back, uncompresses it, and writes the result to "input".
 *
 * @param args optional; args[0] overrides the default file name "user.xml"
 */
public static void main(String[] args) {
    String filename = (args != null && args.length == 1) ? args[0] : "user.xml";
    byte[] original = FileUtils.getBytes(filename);
    try {
        // Compress and persist.
        byte[] compressed = org.xerial.snappy.Snappy.compress(original);
        FileUtils.save("output", compressed);
        // Read the compressed file back, uncompress, and persist again.
        byte[] restored = org.xerial.snappy.Snappy.uncompress(FileUtils.getBytes("output"));
        FileUtils.save("input", restored);
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:com.kse.bigdata.main.Driver.java

public static void main(String[] args) throws Exception {
    /**********************************************************************************
     **    Merge the source files into one.                                          **
    /**    Should change the directories of each file before executing the program   **
    ***********************************************************************************/
    //        String inputFileDirectory = "/media/bk/??/BigData_Term_Project/Debug";
    //        String resultFileDirectory = "/media/bk/??/BigData_Term_Project/debug.csv";
    //        File resultFile = new File(resultFileDirectory);
    //        if(!resultFile.exists())
    //            new SourceFileMerger(inputFileDirectory, resultFileDirectory).mergeFiles();

    /**********************************************************************************
     * Hadoop Operation./*from  w  w  w.  j a  v  a 2 s . co  m*/
     * Befort Start, Check the Length of Sequence We Want to Predict.
     **********************************************************************************/

    Configuration conf = new Configuration();

    //Enable MapReduce intermediate compression as Snappy
    conf.setBoolean("mapred.compress.map.output", true);
    conf.set("mapred.map.output.compression.codec", "org.apache.hadoop.io.compress.SnappyCodec");

    //Enable Profiling
    //conf.setBoolean("mapred.task.profile", true);

    String testPath = null;
    String inputPath = null;
    String outputPath = null;

    int sampleSize = 1;
    ArrayList<String> results = new ArrayList<String>();

    for (int index = 0; index < args.length; index++) {

        /*
         * Mandatory command
         */
        //Extract input path string from command line.
        if (args[index].equals("-in"))
            inputPath = args[index + 1];

        //Extract output path string from command line.
        if (args[index].equals("-out"))
            outputPath = args[index + 1];

        //Extract test data path string from command line.
        if (args[index].equals("-test"))
            testPath = args[index + 1];

        /*
         * Optional command
         */
        //Extract a number of neighbors.
        if (args[index].equals("-nn"))
            conf.setInt(Reduce.NUMBER_OF_NEAREAST_NEIGHBOR, Integer.parseInt(args[index + 1]));

        //Whether job uses normalization or not.
        if (args[index].equals("-norm"))
            conf.setBoolean(Map.NORMALIZATION, true);

        //Extract the number of sample size to test.
        if (args[index].equals("-s"))
            sampleSize = Integer.valueOf(args[index + 1]);

        //Whether job uses mean or median
        //[Default : mean]
        if (args[index].equals("-med"))
            conf.setBoolean(Reduce.MEDIAN, true);
    }

    String outputFileName = "part-r-00000";
    SequenceSampler sampler = new SequenceSampler(testPath, sampleSize);
    LinkedList<Sequence> testSequences = sampler.getRandomSample();

    //        Test Sequence
    //        String testSeqString = "13.591-13.674-13.778-13.892-13.958-14.049-14.153-14.185-14.169-14.092-13.905-13.702-13.438-13.187-13.0-12.914-12.868-12.766-12.62-12.433-12.279-12.142-12.063-12.025-100";
    //        Sequence testSeq = new Sequence(testSeqString);
    //        LinkedList<Sequence> testSequences = new LinkedList<>();
    //        testSequences.add(testSeq);

    for (Sequence seq : testSequences) {

        /*
         ********************  Hadoop Launch ***********************
         */

        System.out.println(seq.getTailString());

        conf.set(Map.INPUT_SEQUENCE, seq.toString());

        Job job = new Job(conf);
        job.setJarByClass(Driver.class);
        job.setJobName("term-project-driver");

        job.setMapperClass(Map.class);
        job.setMapOutputKeyClass(NullWritable.class);
        job.setMapOutputValueClass(Text.class);

        //          Should think another way to implement the combiner class
        //          Current Implementation is not helpful to Job.
        //          job.setCombinerClass(Combiner.class);

        //Set 1 for number of reduce task for keeping 100 most neighbors in sorted set.
        job.setNumReduceTasks(1);
        job.setReducerClass(Reduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        FileInputFormat.setInputPaths(job, new Path(inputPath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));

        job.waitForCompletion(true);

        /*
         * if job finishes, get result of the job and store it in results(list).
         */
        try {
            FileSystem hdfs = FileSystem.get(new Configuration());
            BufferedReader fileReader = new BufferedReader(
                    new InputStreamReader(hdfs.open(new Path(outputPath + "/" + outputFileName))));

            String line;
            while ((line = fileReader.readLine()) != null) {
                results.add(seq.getSeqString() + " " + line);
            }

            fileReader.close();

            hdfs.delete(new Path(outputPath), true);
            hdfs.close();

        } catch (IOException e) {
            e.printStackTrace();
            System.exit(1);
        }
    }

    /*
     * if all jobs finish, store results of jobs to output/result.txt file.
     */
    String finalOutputPath = "output/result.csv";
    try {
        FileSystem hdfs = FileSystem.get(new Configuration());
        Path file = new Path(finalOutputPath);
        if (hdfs.exists(file)) {
            hdfs.delete(file, true);
        }

        OutputStream os = hdfs.create(file);
        PrintWriter printWriter = new PrintWriter(new OutputStreamWriter(os, "UTF-8"));

        //CSV File Header
        printWriter.println("Actual,Predicted,MER,MAE");
        printWriter.flush();

        for (String result : results) {
            String[] tokens = result.split("\\s+");

            printWriter.println(tokens[0] + "," + tokens[1] + "," + tokens[2] + "," + tokens[3]);
            printWriter.flush();
        }

        printWriter.close();
        hdfs.close();
    } catch (IOException e) {
        e.printStackTrace();
        System.exit(1);
    }

}