Example usage for java.util.List.size()

A list of usage examples for java.util.List.size()

Introduction

This page collects usage examples of java.util.List.size().

Prototype

int size();

Document

Returns the number of elements in this list.
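
As a minimal, self-contained sketch of that contract (illustrative only, not taken from any of the projects below; the class name ListSizeSketch is made up), size() starts at zero for a new list and tracks every add and remove:

import java.util.ArrayList;
import java.util.List;

public class ListSizeSketch {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>();
        System.out.println(names.size()); // 0 - a freshly created list is empty

        names.add("alpha");
        names.add("beta");
        System.out.println(names.size()); // 2 - size() reflects each add()

        names.remove("alpha");
        System.out.println(names.size()); // 1 - and each remove()
    }
}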

Usage

From source file:ImageStringToBlob.java

public static void main(String[] args) {
    Connection conn = null;

    if (args.length != 1) {
        System.out.println("Missing argument: full path to <oscar.properties>");
        return;
    }

    try {

        FileInputStream fin = new FileInputStream(args[0]);
        Properties prop = new Properties();
        prop.load(fin);

        String driver = prop.getProperty("db_driver");
        String uri = prop.getProperty("db_uri");
        String db = prop.getProperty("db_name");
        String username = prop.getProperty("db_username");
        String password = prop.getProperty("db_password");

        Class.forName(driver);
        conn = DriverManager.getConnection(uri + db, username, password);
        conn.setAutoCommit(true); // no transactions

        /*
         * select all records ids with image_data not null and contents is null
         * for each id fetch record
         * migrate data from image_data to contents
         */
        String sql = "select image_id from client_image where image_data is not null and contents is null";
        PreparedStatement pst = conn.prepareStatement(sql);
        ResultSet rs = pst.executeQuery();
        List<Long> ids = new ArrayList<Long>();

        while (rs.next()) {
            ids.add(rs.getLong("image_id"));
        }

        rs.close();

        sql = "select image_data from client_image where image_id = ?";
        pst = conn.prepareStatement(sql);

        System.out.println("Migrating image data for " + ids.size() + " images...");
        for (Long id : ids) {
            pst.setLong(1, id);
            ResultSet imagesRS = pst.executeQuery();
            while (imagesRS.next()) {
                String dataString = imagesRS.getString("image_data");
                Blob dataBlob = fromStringToBlob(dataString);
                if (writeBlobToDb(conn, id, dataBlob) == 1) {
                    System.out.println("Image data migrated for image_id: " + id);
                }
            }
            imagesRS.close();
        }
        System.out.println("Migration completed.");

    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }
}

From source file:de.tudarmstadt.ukp.experiments.argumentation.convincingness.sampling.Step2ArgumentPairsSampling.java

public static void main(String[] args) throws Exception {
    String inputDir = args[0];

    // /tmp
    File outputDir = new File(args[1]);
    if (!outputDir.exists()) {
        outputDir.mkdirs();
    }

    // pseudo-random
    final Random random = new Random(1);

    int totalPairsCount = 0;

    // read all debates
    for (File file : IOHelper.listXmlFiles(new File(inputDir))) {
        Debate debate = DebateSerializer.deserializeFromXML(FileUtils.readFileToString(file, "utf-8"));

        // get two stances
        SortedSet<String> originalStances = debate.getStances();

        // cleaning: some debates have three or more stances (the data are inconsistent);
        // remove those with only one argument
        SortedSet<String> stances = new TreeSet<>();
        for (String stance : originalStances) {
            if (debate.getArgumentsForStance(stance).size() > 1) {
                stances.add(stance);
            }
        }

        if (stances.size() != 2) {
            throw new IllegalStateException(
                    "2 stances per debate expected, was " + stances.size() + ", " + stances);
        }

        // for each stance, get pseudo-random N arguments
        for (String stance : stances) {
            List<Argument> argumentsForStance = debate.getArgumentsForStance(stance);

            // shuffle
            Collections.shuffle(argumentsForStance, random);

            // and take at most the first N arguments
            List<Argument> selectedArguments = argumentsForStance.subList(0,
                    argumentsForStance.size() < MAX_SELECTED_ARGUMENTS_PRO_SIDE ? argumentsForStance.size()
                            : MAX_SELECTED_ARGUMENTS_PRO_SIDE);

            List<ArgumentPair> argumentPairs = new ArrayList<>();

            // now create pairs
            for (int i = 0; i < selectedArguments.size(); i++) {
                for (int j = (i + 1); j < selectedArguments.size(); j++) {
                    Argument arg1 = selectedArguments.get(i);
                    Argument arg2 = selectedArguments.get(j);

                    ArgumentPair argumentPair = new ArgumentPair();
                    argumentPair.setDebateMetaData(debate.getDebateMetaData());

                    // assign arg1 and arg2 pseudo-randomly
                    // (not to have the same argument as arg1 all the time)
                    if (random.nextBoolean()) {
                        argumentPair.setArg1(arg1);
                        argumentPair.setArg2(arg2);
                    } else {
                        argumentPair.setArg1(arg2);
                        argumentPair.setArg2(arg1);
                    }

                    // set unique id
                    argumentPair.setId(argumentPair.getArg1().getId() + "_" + argumentPair.getArg2().getId());

                    argumentPairs.add(argumentPair);
                }
            }

            String fileName = IOHelper.createFileName(debate.getDebateMetaData(), stance);

            File outputFile = new File(outputDir, fileName);

            // and save all sampled pairs into an XML file
            XStreamTools.toXML(argumentPairs, outputFile);

            System.out.println("Saved " + argumentPairs.size() + " pairs to " + outputFile);

            totalPairsCount += argumentPairs.size();
        }

    }

    System.out.println("Total pairs generated: " + totalPairsCount);
}

From source file:Main.java

public static void main(String[] args) {
    List<Integer> list = new ArrayList<>();
    long expectedSum = 0;
    for (int i = 0; i < 10000; i++) {
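        // pick a pseudo-random int in the range [1, 100]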
        int random = 1 + (int) (Math.random() * ((100 - 1) + 1));
        list.add(random);
        expectedSum += random;
    }
    System.out.println("expected sum: " + expectedSum);
    ForkJoinPool forkJoinPool = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
    RecursiveSum recursiveSum = new RecursiveSum(list, 0, list.size());
    long recSum = forkJoinPool.invoke(recursiveSum);
    System.out.println("recursive-sum: " + recSum);
}

From source file:gr.demokritos.iit.demos.Demo.java

public static void main(String[] args) {
    try {
        Options options = new Options();
        options.addOption("h", HELP, false, "show help.");
        options.addOption("i", INPUT, true,
                "The file containing JSON " + " representations of tweets or SAG posts - 1 per line"
                        + " default file looked for is " + DEFAULT_INFILE);
        options.addOption("o", OUTPUT, true,
                "Where to write the output " + " default file looked for is " + DEFAULT_OUTFILE);
        options.addOption("p", PROCESS, true, "Type of processing to do "
                + " ner for Named Entity Recognition re for Relation Extraction" + " default is NER");
        options.addOption("s", SAG, false,
                "Whether to process as SAG posts" + " default is off - if passed means process as SAG posts");

        CommandLineParser parser = new BasicParser();
        CommandLine cmd = parser.parse(options, args);
        // DEFAULTS
        String filename = DEFAULT_INFILE;
        String outfilename = DEFAULT_OUTFILE;
        String process = NER;
        boolean isSAG = false;

        if (cmd.hasOption(HELP)) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("NER + RE extraction module", options);
            System.exit(0);
        }
        if (cmd.hasOption(INPUT)) {
            filename = cmd.getOptionValue(INPUT);
        }
        if (cmd.hasOption(OUTPUT)) {
            outfilename = cmd.getOptionValue(OUTPUT);
        }
        if (cmd.hasOption(SAG)) {
            isSAG = true;
        }
        if (cmd.hasOption(PROCESS)) {
            process = cmd.getOptionValue(PROCESS);
        }
        System.out.println();
        System.out.println("Reading from file: " + filename);
        System.out.println("Process type: " + process);
        System.out.println("Processing SAG: " + isSAG);
        System.out.println("Writing to file: " + outfilename);
        System.out.println();

        List<String> jsoni = new ArrayList<>();
        Scanner in = new Scanner(new FileReader(filename));
        while (in.hasNextLine()) {
            String json = in.nextLine();
            jsoni.add(json);
        }
        PrintWriter writer = new PrintWriter(outfilename, "UTF-8");
        System.out.println("Read " + jsoni.size() + " lines from " + filename);
        if (process.equalsIgnoreCase(RE)) {
            System.out.println("Running Relation Extraction");
            System.out.println();
            String json = API.RE(jsoni, isSAG);
            System.out.println(json);
            writer.print(json);
        } else {
            System.out.println("Running Named Entity Recognition");
            System.out.println();
            jsoni = API.NER(jsoni, isSAG);
            /*
            for(String json: jsoni){
               NamedEntityList nel = NamedEntityList.fromJSON(json);
               nel.prettyPrint();
            }
            */
            for (String json : jsoni) {
                System.out.println(json);
                writer.print(json);
            }
        }
        writer.close();
    } catch (ParseException | UnsupportedEncodingException | FileNotFoundException ex) {
        Logger.getLogger(Demo.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:br.com.autonomiccs.cloudTraces.main.GoogleTracesToCloudTracesParser.java

public static void main(String[] args) {
    validateArguments(args);
    List<GoogleTrace> googleTraces = readAllGoogleTracesFromDataset(args[0]);

    logger.info(String.format("#Google traces loaded [%d]", googleTraces.size()));
    Collection<GoogleJob> googleJobs = buildTasksHierachyAndCreateJobList(googleTraces);
    buildJobsTaksByTimeMap(googleJobs);
    fillOutStartAndEndTimeOfJobs(googleJobs);
    calculateThePeakJobResourceUsage(googleJobs);

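    // seed the max/min trackers below with the first job in the collection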
    GoogleJob biggestCpuUsageJob = googleJobs.iterator().next();
    GoogleJob biggestMemoryUsageJob = biggestCpuUsageJob;

    GoogleJob lowestCpuUsageJob = googleJobs.iterator().next();
    GoogleJob lowestMemoryUsageJob = lowestCpuUsageJob;

    for (GoogleJob googleJob : googleJobs) {
        if (biggestCpuUsageJob.getMaximumCpuUsageAtTime() < googleJob.getMaximumCpuUsageAtTime()) {
            biggestCpuUsageJob = googleJob;
        }
        if (biggestMemoryUsageJob.getMaximumMemoryUsageAtTime() < googleJob.getMaximumMemoryUsageAtTime()) {
            biggestMemoryUsageJob = googleJob;
        }

        if (lowestCpuUsageJob.getMaximumCpuUsageAtTime() > googleJob.getMaximumCpuUsageAtTime()) {
            lowestCpuUsageJob = googleJob;
        }
        if (lowestMemoryUsageJob.getMaximumMemoryUsageAtTime() > googleJob.getMaximumMemoryUsageAtTime()) {
            lowestMemoryUsageJob = googleJob;
        }
    }
    logger.info("Max job cpu usage: " + biggestCpuUsageJob);
    logger.info("Max job memory usage: " + biggestMemoryUsageJob);
    logger.info("Min job cpu usage: " + lowestCpuUsageJob);
    logger.info("Min job memory usage: " + lowestMemoryUsageJob);

    List<VirtualMachine> virtualMachines = createVmsToExecuteJobs(googleJobs);
    writeVmTracesToFile(virtualMachines);
}

From source file:com.ibm.watson.app.qaclassifier.tools.GenerateTrainingAndPopulationData.java

public static void main(String[] args) throws IOException {
    System.out.println(MessageKey.AQWQAC20007I_starting_generate_training_and_populating.getMessage()
            .getFormattedMessage());

    // handle reading the command line parameters and initializing the files
    readCommandLineParameters(args);
    System.out.println(MessageKey.AQWQAC20008I_cmd_line_param_read.getMessage().getFormattedMessage());

    // process the answers input file and create the in-memory store for it
    List<ManagedAnswer> answers = PopulateAnswerStore.loadAnswerStore(answerInput.getPath(),
            answerTextDirectory.getPath());
    if (answers == null || answers.size() == 0) {
        System.err.println(
                MessageKey.AQWQAC24010E_answer_store_unable_to_load.getMessage().getFormattedMessage());
        System.exit(0);
    }
    System.out.println(MessageKey.AQWQAC20004I_answer_input_file_read.getMessage().getFormattedMessage());

    // process the questions input file and create the in-memory store for it
    NLClassifierTrainingData training = readQuestionInput(answers);
    if (training == null || training.getTrainingData() == null || training.getTrainingData().size() == 0) {
        System.err.println(
                MessageKey.AQWQAC24010E_answer_store_unable_to_load.getMessage().getFormattedMessage());
        System.exit(0);
    }
    System.out.println(MessageKey.AQWQAC24005I_question_input_file_read.getMessage().getFormattedMessage());

    // write the answer store population file
    // create the gson object that is doing all the writing
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    writeGSON(gson.toJson(answers), answerOutput);
    System.out.println(MessageKey.AQWQAC24006I_answer_output_file_written.getMessage().getFormattedMessage());

    // write the classifier training file
    writeGSON(training.toJson(), questionOutput);
    System.out.println(MessageKey.AQWQAC24007I_training_data_file_written.getMessage().getFormattedMessage());
}

From source file:is.hi.bok.deduplicator.DigestIndexer.java

@SuppressWarnings({ "unchecked", "rawtypes" })
public static void main(String[] args) throws Exception {
    CommandLineParser clp = new CommandLineParser(args, new PrintWriter(System.out));
    long start = System.currentTimeMillis();

    // Set default values for all settings.
    boolean etag = false;
    boolean equivalent = false;
    boolean timestamp = false;
    String indexMode = MODE_BOTH;
    boolean addToIndex = false;
    String mimefilter = "^text/.*";
    boolean blacklist = true;
    String iteratorClassName = CrawlLogIterator.class.getName();
    String origin = null;
    boolean skipDuplicates = false;

    // Process the options
    Option[] opts = clp.getCommandLineOptions();
    for (int i = 0; i < opts.length; i++) {
        Option opt = opts[i];
        switch (opt.getId()) {
        case 'w':
            blacklist = false;
            break;
        case 'a':
            addToIndex = true;
            break;
        case 'e':
            etag = true;
            break;
        case 'h':
            clp.usage(0);
            break;
        case 'i':
            iteratorClassName = opt.getValue();
            break;
        case 'm':
            mimefilter = opt.getValue();
            break;
        case 'o':
            indexMode = opt.getValue();
            break;
        case 's':
            equivalent = true;
            break;
        case 't':
            timestamp = true;
            break;
        case 'r':
            origin = opt.getValue();
            break;
        case 'd':
            skipDuplicates = true;
            break;
        default:
            System.err.println("Unhandled option id: " + opt.getId());
        }
    }

    List cargs = clp.getCommandLineArguments();

    if (cargs.size() != 2) {
        // Should be exactly two arguments. Source and target!
        clp.usage(0);
    }

    // Get the CrawlDataIterator
    // Get the iterator classname or load default.
    Class cl = Class.forName(iteratorClassName);
    Constructor co = cl.getConstructor(new Class[] { String.class });
    CrawlDataIterator iterator = (CrawlDataIterator) co.newInstance(new Object[] { (String) cargs.get(0) });

    // Print initial stuff
    System.out.println("Indexing: " + cargs.get(0));
    System.out.println(" - Mode: " + indexMode);
    System.out.println(" - Mime filter: " + mimefilter + " (" + (blacklist ? "blacklist" : "whitelist") + ")");
    System.out.println(" - Includes" + (equivalent ? " <equivalent URL>" : "")
            + (timestamp ? " <timestamp>" : "") + (etag ? " <etag>" : ""));
    System.out.println(" - Skip duplicates: " + (skipDuplicates ? "yes" : "no"));
    System.out.println(" - Iterator: " + iteratorClassName);
    System.out.println("   - " + iterator.getSourceType());
    System.out.println("Target: " + cargs.get(1));
    if (addToIndex) {
        System.out.println(" - Add to existing index (if any)");
    } else {
        System.out.println(" - New index (erases any existing index at " + "that location)");
    }

    DigestIndexer di = new DigestIndexer((String) cargs.get(1), indexMode, equivalent, timestamp, etag,
            addToIndex);

    // Create the index
    di.writeToIndex(iterator, mimefilter, blacklist, origin, true, skipDuplicates);

    // Clean-up
    di.close();

    System.out.println("Total run time: "
            + ArchiveUtils.formatMillisecondsToConventional(System.currentTimeMillis() - start));
}

From source file:drpc.KMeansDrpcQuery.java

public static void main(final String[] args)
        throws IOException, TException, DRPCExecutionException, DecoderException, ClassNotFoundException {
    if (args.length < 3) {
        System.err.println("Where are the arguments? args -- DrpcServer DrpcFunctionName folder");
        return;
    }

    final DRPCClient client = new DRPCClient(args[0], 3772, 1000000 /*timeout*/);
    final Queue<String> featureFiles = new ArrayDeque<String>();
    SpoutUtils.listFilesForFolder(new File(args[2]), featureFiles);

    Scanner scanner = new Scanner(featureFiles.peek());
    int i = 0;
    while (scanner.hasNextLine() && i++ < 10) {
        List<Map<String, List<Double>>> dict = SpoutUtils.pythonDictToJava(scanner.nextLine());
        for (Map<String, List<Double>> map : dict) {
            i++;

            Double[] features = map.get("chi2").toArray(new Double[0]);
            Double[] moreFeatures = map.get("chi1").toArray(new Double[0]);
            Double[] rmsd = map.get("rmsd").toArray(new Double[0]);
            Double[] both = (Double[]) ArrayUtils.addAll(features, moreFeatures);
            String parameters = serializeFeatureVector(ArrayUtils.toPrimitive(both));

            String centroidsSerialized = runQuery(args[1], parameters, client);

            Gson gson = new Gson();
            Object[] deserialized = gson.fromJson(centroidsSerialized, Object[].class);

            for (Object obj : deserialized) {
                // result we get is of the form List<result>
                List l = ((List) obj);
                centroidsSerialized = (String) l.get(0);

                String[] centroidSerializedArrays = centroidsSerialized
                        .split(MlStormClustererQuery.KmeansClustererQuery.CENTROID_DELIM);
                List<double[]> centroids = new ArrayList<double[]>();
                for (String centroid : centroidSerializedArrays) {
                    centroids.add(MlStormFeatureVectorUtils.deserializeToFeatureVector(centroid));
                }

                double[] rmsdPrimitive = ArrayUtils.toPrimitive(both);
                double[] rmsdKmeans = new double[centroids.size()];

                for (int k = 0; k < centroids.size(); k++) {
                    System.out.println("centroid        -- " + Arrays.toString(centroids.get(k)));
                    double[] centroid = centroids.get(k);
                    rmsdKmeans[k] = computeRootMeanSquare(centroid);
                }

                System.out.println("1 rmsd original -- " + Arrays.toString(rmsd));
                System.out.println("2 rmsd k- Means -- " + Arrays.toString(rmsdKmeans));
                System.out.println();
            }

        }
    }
    client.close();
}

From source file:com.cyberway.issue.io.warc.WARCReader.java

/**
 * Command-line interface to WARCReader.
 *
 * Here is the command-line interface:
 * <pre>
 * usage: java com.cyberway.issue.io.arc.WARCReader [--offset=#] ARCFILE
 *  -h,--help      Prints this message and exits.
 *  -o,--offset    Outputs record at this offset into arc file.</pre>
 *
 * <p>Outputs using a pseudo-CDX format as described here:
 * <a href="http://www.archive.org/web/researcher/cdx_legend.php">CDX
 * Legend</a> and here
 * <a href="http://www.archive.org/web/researcher/example_cdx.php">Example</a>.
 * The legend used below is: 'CDX b e a m s c V (or v if uncompressed) n g'.
 * The hash is a hard-coded straight SHA-1 hash of the content.
 *
 * @param args Command-line arguments.
 * @throws ParseException Failed parse of the command line.
 * @throws IOException
 * @throws java.text.ParseException
 */
public static void main(String[] args) throws ParseException, IOException, java.text.ParseException {
    Options options = getOptions();
    PosixParser parser = new PosixParser();
    CommandLine cmdline = parser.parse(options, args, false);
    List cmdlineArgs = cmdline.getArgList();
    Option[] cmdlineOptions = cmdline.getOptions();
    HelpFormatter formatter = new HelpFormatter();

    // If no args, print help.
    if (cmdlineArgs.size() <= 0) {
        usage(formatter, options, 0);
    }

    // Now look at options passed.
    long offset = -1;
    boolean digest = false;
    boolean strict = false;
    String format = CDX;
    for (int i = 0; i < cmdlineOptions.length; i++) {
        switch (cmdlineOptions[i].getId()) {
        case 'h':
            usage(formatter, options, 0);
            break;

        case 'o':
            offset = Long.parseLong(cmdlineOptions[i].getValue());
            break;

        case 's':
            strict = true;
            break;

        case 'd':
            digest = getTrueOrFalse(cmdlineOptions[i].getValue());
            break;

        case 'f':
            format = cmdlineOptions[i].getValue().toLowerCase();
            boolean match = false;
            // List of supported formats.
            final String[] supportedFormats = { CDX, DUMP, GZIP_DUMP, CDX_FILE };
            for (int ii = 0; ii < supportedFormats.length; ii++) {
                if (supportedFormats[ii].equals(format)) {
                    match = true;
                    break;
                }
            }
            if (!match) {
                usage(formatter, options, 1);
            }
            break;

        default:
            throw new RuntimeException("Unexpected option: " + cmdlineOptions[i].getId());
        }
    }

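    // A non-negative offset means dump the single record at that offset from one file;
    // otherwise iterate over every file or URL passed on the command line.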
    if (offset >= 0) {
        if (cmdlineArgs.size() != 1) {
            System.out.println("Error: Pass one arcfile only.");
            usage(formatter, options, 1);
        }
        WARCReader r = WARCReaderFactory.get(new File((String) cmdlineArgs.get(0)), offset);
        r.setStrict(strict);
        outputRecord(r, format);
    } else {
        for (Iterator i = cmdlineArgs.iterator(); i.hasNext();) {
            String urlOrPath = (String) i.next();
            try {
                WARCReader r = WARCReaderFactory.get(urlOrPath);
                r.setStrict(strict);
                r.setDigest(digest);
                output(r, format);
            } catch (RuntimeException e) {
                // Write out name of file we failed on to help with
                // debugging.  Then print stack trace and try to keep
                // going.  We do this for case where we're being fed
                // a bunch of ARCs; just note the bad one and move
                // on to the next.
                System.err.println("Exception processing " + urlOrPath + ": " + e.getMessage());
                e.printStackTrace(System.err);
                System.exit(1);
            }
        }
    }
}

From source file:br.usp.poli.lta.cereda.spa2run.Main.java

public static void main(String[] args) {

    Utils.printBanner();
    CommandLineParser parser = new DefaultParser();

    try {
        CommandLine line = parser.parse(Utils.getOptions(), args);
        List<Spec> specs = Utils.fromFilesToSpecs(line.getArgs());
        List<Metric> metrics = Utils.fromFilesToMetrics(line);
        Utils.setMetrics(metrics);
        Utils.resetCalculations();
        AdaptiveAutomaton automaton = Utils.getAutomatonFromSpecs(specs);

        System.out.println("SPA generated successfully:");
        System.out.println("- " + specs.size() + " submachine(s) found.");
        if (!Utils.detectEpsilon(automaton)) {
            System.out.println("- No empty transitions.");
        }
        if (!metrics.isEmpty()) {
            System.out.println("- " + metrics.size() + " metric(s) found.");
        }

        System.out.println("\nStarting shell, please wait...\n" + "(press CTRL+C or type `:quit'\n"
                + "to exit the application)\n");

        String query = "";
        Scanner scanner = new Scanner(System.in);
        String prompt = "[%d] query> ";
        String result = "[%d] result> ";
        int counter = 1;
        do {

            try {
                String term = String.format(prompt, counter);
                System.out.print(term);
                query = scanner.nextLine().trim();
                if (!query.equals(":quit")) {
                    boolean accept = automaton.recognize(Utils.toSymbols(query));
                    String type = automaton.getRecognitionPaths().size() == 1 ? " (deterministic)"
                            : " (nondeterministic)";
                    System.out.println(String.format(result, counter) + accept + type);

                    if (!metrics.isEmpty()) {
                        System.out.println(StringUtils.repeat(" ", String.format("[%d] ", counter).length())
                                + Utils.prettyPrintMetrics());
                    }

                    System.out.println();

                }
            } catch (Exception exception) {
                System.out.println();
                Utils.printException(exception);
                System.out.println();
            }

            counter++;
            Utils.resetCalculations();

        } while (!query.equals(":quit"));
        System.out.println("That's all folks!");

    } catch (ParseException nothandled) {
        Utils.printHelp();
    } catch (Exception exception) {
        Utils.printException(exception);
    }
}