Example usage for java.util Iterator hasNext

List of usage examples for java.util Iterator hasNext

Introduction

On this page you can find usage examples for java.util.Iterator.hasNext.

Prototype

boolean hasNext();

Document

Returns true if the iteration has more elements.
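Before the full examples below, here is a minimal, self-contained sketch of the usual pattern (class and variable names are illustrative only, not taken from the examples): call hasNext() to check whether another element remains, then next() to retrieve it.

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class HasNextExample {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta", "gamma");
        Iterator<String> it = names.iterator();
        // hasNext() returns true while the iteration has more elements
        while (it.hasNext()) {
            System.out.println(it.next());
        }
    }
}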

Usage

From source file:net.padaf.xmpbox.parser.XMLValueTypeDescriptionManager.java

/**
 * Sample showing how to write and read value type descriptions.
 * @param args
 *            not used
 * @throws BuildPDFAExtensionSchemaDescriptionException
 *             When errors occur while building or reading the XML file
 */
public static void main(String[] args) throws BuildPDFAExtensionSchemaDescriptionException {
    XMLValueTypeDescriptionManager vtMaker = new XMLValueTypeDescriptionManager();

    // add Descriptions
    for (int i = 0; i < 3; i++) {
        vtMaker.addValueTypeDescription("testType" + i, "nsURI" + i, "prefix" + i, "description" + i);

    }
    List<FieldDescription> fieldSample = new ArrayList<FieldDescription>();
    for (int i = 0; i < 2; i++) {
        fieldSample.add(new FieldDescription("fieldName" + i, "valueType" + i, "description" + i));
    }
    vtMaker.addValueTypeDescription("testTypeField", "http://test.withfield.com/vt/", "prefTest",
            " value type description", fieldSample);

    // Display XML conversion
    System.out.println("Display XML Result:");
    vtMaker.toXML(System.out);

    // Sample to show how to build object from XML file
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    vtMaker.toXML(bos);
    IOUtils.closeQuietly(bos);

    // emulate a new reading
    InputStream is = new ByteArrayInputStream(bos.toByteArray());
    vtMaker = new XMLValueTypeDescriptionManager();
    vtMaker.loadListFromXML(is);
    List<ValueTypeDescription> result = vtMaker.getValueTypesDescriptionList();
    System.out.println();
    System.out.println();
    System.out.println("Result of XML Loading :");
    for (ValueTypeDescription propertyDescription : result) {
        System.out.println(propertyDescription.getType() + " :" + propertyDescription.getDescription());
        if (propertyDescription.getFields() != null) {
            Iterator<FieldDescription> fit = propertyDescription.getFields().iterator();
            FieldDescription field;
            while (fit.hasNext()) {
                field = fit.next();
                System.out.println("Field " + field.getName() + " :" + field.getValueType());
            }
        }
    }

}

From source file:importer.handler.post.stages.Splitter.java

/** Test and command-line utility */
public static void main(String[] args) {
    if (args.length >= 1) {
        try {
            int i = 0;
            int fileIndex = 0;
            // see if the user supplied a conf file
            String textConf = Discriminator.defaultConf;
            while (i < args.length) {
                if (args[i].equals("-c") && i < args.length - 1) {
                    textConf = readConfig(args[i + 1]);
                    i += 2;
                } else {
                    fileIndex = i;
                    i++;
                }
            }
            File f = new File(args[fileIndex]);
            char[] data = new char[(int) f.length()];
            FileReader fr = new FileReader(f);
            fr.read(data);
            fr.close();
            JSONObject config = (JSONObject) JSONValue.parse(textConf);
            Splitter split = new Splitter(config);
            Map<String, String> map = split.split(new String(data));
            Set<String> keys = map.keySet();
            String rawFileName = args[fileIndex];
            int pos = rawFileName.lastIndexOf(".");
            if (pos != -1)
                rawFileName = rawFileName.substring(0, pos);
            Iterator<String> iter = keys.iterator();
            while (iter.hasNext()) {
                String key = iter.next();
                String fName = rawFileName + "-" + key + ".xml";
                File g = new File(fName);
                if (g.exists())
                    g.delete();
                FileOutputStream fos = new FileOutputStream(g);
                fos.write(map.get(key).getBytes("UTF-8"));
                fos.close();
            }
        } catch (Exception e) {
            e.printStackTrace(System.out);
        }
    } else
        System.out.println("usage: java -jar split.jar [-c json-config] <tei-xml>\n");
}

From source file:gedi.lfc.quick.ShiroguchiCounter.java

public static void main(String[] args) throws IOException {

    String path = "/home/users/erhard/biostor/seq/ngade/shiroguchi_randombarcodes/data/";

    MemoryIntervalTreeStorage<int[]> reads = new MemoryIntervalTreeStorage<int[]>(int[].class);

    String[] files = { "Shiroguchi_A_collapsed.bed", "Shiroguchi_B_collapsed.bed",
            "Shiroguchi_A_uncollapsed.bed", "Shiroguchi_B_uncollapsed.bed" };
    for (int i = 0; i < 4; i++) {
        Iterator<String> it = new LineOrientedFile(path + files[i]).lineIterator();
        while (it.hasNext()) {
            String[] f = StringUtils.split(it.next(), '\t');
            Chromosome chr = Chromosome.obtain(f[0]);
            ArrayGenomicRegion region = new ArrayGenomicRegion(Integer.parseInt(f[1]), Integer.parseInt(f[2]));
            int c = Integer.parseInt(StringUtils.splitField(f[3], '|', 0));

            int[] counts = reads.getData(chr, region);
            if (counts == null)
                reads.add(chr, region, counts = new int[4]);

            counts[i] += c;
        }
    }

    HashMap<String, String> map = new HashMap<String, String>();
    new LineOrientedFile(path + "U00096.2.genes.csv").lineIterator().forEachRemaining(s -> {
        String[] f = StringUtils.split(s, '\t');
        map.put(f[0], f[7]);
    });

    LineOrientedFile fragments = new LineOrientedFile("fragments.csv");
    fragments.startWriting();
    fragments.writef("Gene\tonlyA\tonlyB\tBoth\tLength\n");

    LineOrientedFile bias = new LineOrientedFile("bias.csv");
    bias.startWriting();
    bias.writef("OriginalA\tBiasA\tOriginalB\tBiasB\n");

    IntArrayList biasFactors = new IntArrayList();
    ArrayList<GeneData> geneData = new ArrayList<GeneData>();

    MemoryIntervalTreeStorage<Transcript> genes = new BiomartExonFileReader(path + "U00096.2.exons.csv", false)
            .readIntoMemoryTakeFirst();
    for (ImmutableReferenceGenomicRegion<Transcript> g : genes.getReferenceGenomicRegions()) {
        ArrayList<ImmutableReferenceGenomicRegion<int[]>> frag = reads
                .getReferenceRegionsIntersecting(g.getReference().toStrandIndependent(), g.getRegion());

        GeneData gd = new GeneData();

        int l = g.getRegion().getTotalLength();
        for (ImmutableReferenceGenomicRegion<int[]> r : frag) {
            if (r.getData()[0] == 0)
                gd.onlyB++;
            if (r.getData()[1] == 0)
                gd.onlyA++;
            if (r.getData()[0] == 0 && r.getData()[1] == 0)
                throw new RuntimeException();

            bias.writef("%d\t%.0f\t%d\t%.0f\n", r.getData()[0], r.getData()[2] / (double) r.getData()[0],
                    r.getData()[1], r.getData()[3] / (double) r.getData()[1]);
            if (r.getData()[0] > 0) {
                biasFactors.add(r.getData()[2] / r.getData()[0]);
            }
            if (r.getData()[1] > 0) {
                biasFactors.add(r.getData()[3] / r.getData()[1]);
            }

        }
        gd.both = frag.size() - gd.onlyA - gd.onlyB;
        fragments.writef("%s\t%d\t%d\t%d\t%d\n", map.get(g.getData().getTranscriptId()), gd.onlyA, gd.onlyB,
                gd.both, l);

        if (gd.onlyA + gd.onlyB + gd.both > 0)
            geneData.add(gd);
    }

    fragments.finishWriting();
    bias.finishWriting();

    double fc = 1.4;
    int rep = 5;
    int nDiff = 1000;
    int n = 10000;
    int N = 6000;
    double noise = 0.05;

    LineOrientedFile countMatrix = new LineOrientedFile("countMatrix.csv");
    countMatrix.startWriting();

    LineOrientedFile downCountMatrix = new LineOrientedFile("countMatrix_downsampled.csv");
    downCountMatrix.startWriting();

    RandomNumbers rnd = new RandomNumbers();
    for (int i = 0; i < n; i++) {
        GeneData gd = geneData.get(rnd.getUnif(0, geneData.size()));

        //         int N = gd.both==0?Integer.MAX_VALUE/2:(int) (gd.onlyA+gd.onlyB+gd.both+gd.onlyA*gd.onlyB/gd.both);
        double p1 = (gd.onlyA + gd.both) / (double) N;
        double p2 = i < nDiff ? p1 / fc : p1;

        ArrayList<ReadData> list = new ArrayList<ReadData>();
        for (int r = 0; r < rep * 2; r++) {
            int k = rnd.getBinom(N, r < rep ? p1 : p2) + 1;
            int hit = N == -1 ? 0 : rnd.getBinom(k, list.size() / (double) N);
            rnd.shuffle(list);
            for (int x = 0; x < hit; x++)
                list.get(x).reads[r] = (int) rnd.getNormal(list.get(x).bias, list.get(x).bias * noise);
            for (int x = 0; x < k - hit; x++)
                list.add(new ReadData(biasFactors.getInt(rnd.getUnif(0, biasFactors.size())), rep * 2, r));
        }

        int[] c = new int[rep * 2];
        for (ReadData d : list) {
            for (int r = 0; r < c.length; r++) {
                c[r] += d.reads[r];
            }
        }

        double[] down = new double[rep * 2];
        for (ReadData d : list) {
            double max = ArrayUtils.max(d.reads);
            for (int r = 0; r < down.length; r++) {
                down[r] += d.reads[r] / max;
            }
        }

        countMatrix.writeLine(StringUtils.concat("\t", c));
        downCountMatrix.writeLine(StringUtils.concat("\t", down));

    }

    countMatrix.finishWriting();
    downCountMatrix.finishWriting();

}

From source file:btrplace.fromEntropy.Converter.java

public static void main(String[] args) {
    String src, dst = null, output, scriptDC = null, dirScriptsCL = null;

    if (args.length < 5 || args.length > 6 || !args[args.length - 2].equals("-o")) {
        usage(1);
    }
    src = args[0];
    output = args[args.length - 1];
    if (args.length > 5) {
        dst = args[1];
    }
    scriptDC = args[args.length - 4];
    dirScriptsCL = args[args.length - 3];

    OutputStreamWriter out = null;
    try {
        // Convert the src file
        ConfigurationConverter conv = new ConfigurationConverter(src);
        Instance i = conv.getInstance();

        // Read the dst file, deduce and add the states constraints
        if (dst != null) {
            i.getSatConstraints().addAll(conv.getNextStates(dst));
        }

        // Read the script files
        ScriptBuilder scriptBuilder = new ScriptBuilder(i.getModel());
        //scriptBuilder.setIncludes(new PathBasedIncludes(scriptBuilder,
        //        new File("src/test/resources")));

        // Read the datacenter script file if exists
        if (scriptDC != null) {
            String strScriptDC = null;
            try {
                strScriptDC = readFile(scriptDC);
            } catch (IOException e) {
                e.printStackTrace();
            }
            Script scrDC = null;
            try {
                // Build the DC script
                scrDC = scriptBuilder.build(strScriptDC);

            } catch (ScriptBuilderException sbe) {
                System.out.println(sbe);
            }

            // Set the DC script as an include
            BasicIncludes bi = new BasicIncludes();
            bi.add(scrDC);
            scriptBuilder.setIncludes(bi);
        }

        // Read all the client script files
        String scriptCL = null, strScriptCL = null;
        Script scrCL = null;
        Iterator it = FileUtils.iterateFiles(new File(dirScriptsCL), null, false);
        while (it.hasNext()) {
            scriptCL = dirScriptsCL + "/" + ((File) it.next()).getName();

            if (scriptCL != null) {
                // Read
                try {
                    strScriptCL = readFile(scriptCL);
                } catch (IOException e) {
                    e.printStackTrace();
                }

                // Parse
                try {
                    scrCL = scriptBuilder.build(strScriptCL);

                } catch (ScriptBuilderException sbe) {
                    System.out.println(sbe);
                    sbe.printStackTrace();
                }

                // Add the resulting constraints
                if (scrCL.getConstraints() != null) {
                    i.getSatConstraints().addAll(scrCL.getConstraints());
                }
            }
        }

        /************** PATCH **************/
        // State constraints;
        for (Node n : i.getModel().getMapping().getOnlineNodes()) {
            i.getSatConstraints().add(new Online(n));
        }
        for (Node n : i.getModel().getMapping().getOfflineNodes()) {
            i.getSatConstraints().add(new Offline(n));
        }
        // Remove preserve constraints
        for (Iterator<SatConstraint> ite = i.getSatConstraints().iterator(); ite.hasNext();) {
            SatConstraint s = ite.next();
            if (s instanceof Preserve && src.contains("nr")) {
                ite.remove();
            }
        }
        /************************************/

        // Convert to JSON
        InstanceConverter iConv = new InstanceConverter();
        JSONObject o = iConv.toJSON(i);

        // Check for gzip extension
        if (output.endsWith(".gz")) {
            out = new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(output)));
        } else {
            out = new FileWriter(output);
        }

        // Write the output file
        o.writeJSONString(out);
        out.close();

    } catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        System.exit(1);
    } finally {
        if (out != null) {
            try {
                out.close();
            } catch (IOException e) {
                System.err.println(e.getMessage());
                System.exit(1);
            }
        }
    }
}

From source file:cosmos.example.BuildingPermitsExample.java

public static void main(String[] args) throws Exception {
    BuildingPermitsExample example = new BuildingPermitsExample();
    new JCommander(example, args);

    File inputFile = new File(example.fileName);

    Preconditions.checkArgument(inputFile.exists() && inputFile.isFile() && inputFile.canRead(),
            "Expected " + example.fileName + " to be a readable file");

    String zookeepers;
    String instanceName;
    Connector connector;
    MiniAccumuloCluster mac = null;
    File macDir = null;

    // Use the MiniAccumuloCluster if requested
    if (example.useMiniAccumuloCluster) {
        macDir = Files.createTempDir();
        String password = "password";
        MiniAccumuloConfig config = new MiniAccumuloConfig(macDir, password);
        config.setNumTservers(1);

        mac = new MiniAccumuloCluster(config);
        mac.start();

        zookeepers = mac.getZooKeepers();
        instanceName = mac.getInstanceName();

        ZooKeeperInstance instance = new ZooKeeperInstance(instanceName, zookeepers);
        connector = instance.getConnector("root", new PasswordToken(password));
    } else {
        // Otherwise connect to a running instance
        zookeepers = example.zookeepers;
        instanceName = example.instanceName;

        ZooKeeperInstance instance = new ZooKeeperInstance(instanceName, zookeepers);
        connector = instance.getConnector(example.username, new PasswordToken(example.password));
    }

    // Instantiate an instance of Cosmos
    Cosmos cosmos = new CosmosImpl(zookeepers);

    // Create a definition for the data we want to load
    Store id = Store.create(connector, new Authorizations(), AscendingIndexIdentitySet.create());

    // Register the definition with Cosmos so it can track its progress.
    cosmos.register(id);

    // Load all of the data from our inputFile
    LoadBuildingPermits loader = new LoadBuildingPermits(cosmos, id, inputFile);
    loader.run();

    // Finalize the SortableResult which will prevent future writes to the data set
    cosmos.finalize(id);

    // Flush the ingest traces to the backend so we can see the results;
    id.sendTraces();

    // Get back the Set of Columns that we've ingested.
    Set<Column> schema = Sets.newHashSet(cosmos.columns(id));

    log.debug("\nColumns: " + schema);

    Iterator<Column> iter = schema.iterator();
    while (iter.hasNext()) {
        Column c = iter.next();
        // Remove the internal ID field and columns that begin with CONTRACTOR_
        if (c.equals(LoadBuildingPermits.ID) || c.name().startsWith("CONTRACTOR_")) {
            iter.remove();
        }
    }

    Iterable<Index> indices = Iterables.transform(schema, new Function<Column, Index>() {

        @Override
        public Index apply(Column col) {
            return Index.define(col);
        }

    });

    // Ensure that we have locality groups set as we expect
    log.info("Ensure locality groups are set");
    id.optimizeIndices(indices);

    // Compact down the data for this SortableResult    
    log.info("Issuing compaction for relevant data");
    id.consolidate();

    final int numTopValues = 10;

    // Walk through each column in the result set
    for (Column c : schema) {
        Stopwatch sw = new Stopwatch();
        sw.start();

        // Get the number of times we've seen each value in a given column
        CloseableIterable<Entry<RecordValue<?>, Long>> groupingsInColumn = cosmos.groupResults(id, c);

        log.info(c.name() + ":");

        // Iterate over the counts, collecting the top N values in each column
        TreeMap<Long, RecordValue<?>> topValues = Maps.newTreeMap();

        for (Entry<RecordValue<?>, Long> entry : groupingsInColumn) {
            if (topValues.size() == numTopValues) {
                Entry<Long, RecordValue<?>> least = topValues.pollFirstEntry();

                if (least.getKey() < entry.getValue()) {
                    topValues.put(entry.getValue(), entry.getKey());
                } else {
                    topValues.put(least.getKey(), least.getValue());
                }
            } else if (topValues.size() < numTopValues) {
                topValues.put(entry.getValue(), entry.getKey());
            }
        }

        for (Long key : topValues.descendingKeySet()) {
            log.info(topValues.get(key).value() + " occurred " + key + " times");
        }

        sw.stop();

        log.info("Took " + sw.toString() + " to run query.\n");
    }

    log.info("Deleting records");

    // Delete the records we've ingested
    if (!example.useMiniAccumuloCluster) {
        // Because I'm lazy and don't want to wait around to run the BatchDeleter when we're just going
        // to rm -rf the directory in a few secs.
        cosmos.delete(id);
    }

    // And shut down Cosmos
    cosmos.close();

    log.info("Cosmos stopped");

    // If we were using MAC, also stop that
    if (example.useMiniAccumuloCluster && null != mac) {
        mac.stop();
        if (null != macDir) {
            FileUtils.deleteDirectory(macDir);
        }
    }
}

From source file:com.browseengine.bobo.serialize.JSONSerializer.java

public static void main(String[] args) throws Exception {
    class B implements JSONSerializable {
        transient int tIntVal = 6;
        String s = "bstring";
        float[] fArray = new float[] { 1.3f, 1.2f, 2.5f };
    }
    class C implements JSONExternalizable {
        private HashMap<String, String> map = new HashMap<String, String>();

        public void fromJSON(JSONObject obj) throws JSONSerializationException, JSONException {
            map.clear();
            Iterator iter = obj.keys();
            while (iter.hasNext()) {
                String key = (String) iter.next();
                String val = obj.getString(key);
                map.put(key, val);
            }
        }

        public JSONObject toJSON() throws JSONSerializationException, JSONException {
            JSONObject retVal = new JSONObject();
            Iterator<String> iter = map.keySet().iterator();
            while (iter.hasNext()) {
                String name = iter.next();
                String val = map.get(name);
                retVal.put(name, val);
            }
            return retVal;
        }

        public void set(String name, String val) {
            map.put(name, val);
        }
    }
    class A implements JSONSerializable {
        int intVal = 4;
        double doubleVal = 1.2;
        short shortVal = 12;
        HashMap hash = new HashMap();
        int[] intArray = new int[] { 1, 3 };
        String[] strArray = new String[] { "john", "wang" };
        B[] b = new B[] { new B(), new B() };
        B b2 = new B();
        C c = new C();

        A() {
            c.set("city", "san jose");
            c.set("country", "usa");
        }
    }

    JSONObject jsonObj = JSONSerializer.serializeJSONObject(new A());

    String s1 = jsonObj.toString();

    System.out.println(s1);

    A a = (A) deSerialize(A.class, jsonObj);
    jsonObj = JSONSerializer.serializeJSONObject(a);
    String s2 = jsonObj.toString();

    System.out.println(s1.equals(s2));
}

From source file:com.cloud.test.longrun.PerformanceWithAPI.java

public static void main(String[] args) {

    List<String> argsList = Arrays.asList(args);
    Iterator<String> iter = argsList.iterator();
    String host = "http://localhost";
    int numThreads = 1;

    while (iter.hasNext()) {
        String arg = iter.next();
        if (arg.equals("-h")) {
            host = "http://" + iter.next();
        }
        if (arg.equals("-t")) {
            numThreads = Integer.parseInt(iter.next());
        }
        if (arg.equals("-n")) {
            numVM = Integer.parseInt(iter.next());
        }
    }

    final String server = host + ":" + _apiPort + "/";
    final String developerServer = host + ":" + _developerPort + _apiUrl;

    s_logger.info("Starting test in " + numThreads + " thread(s). Each thread is launching " + numVM + " VMs");

    for (int i = 0; i < numThreads; i++) {
        new Thread(new Runnable() {
            public void run() {
                try {

                    String username = null;
                    String singlePrivateIp = null;
                    String singlePublicIp = null;
                    Random ran = new Random();
                    username = Math.abs(ran.nextInt()) + "-user";

                    //Create User
                    User myUser = new User(username, username, server, developerServer);
                    try {
                        myUser.launchUser();
                        myUser.registerUser();
                    } catch (Exception e) {
                        s_logger.warn("Error code: ", e);
                    }

                    if (myUser.getUserId() != null) {
                        s_logger.info("User " + myUser.getUserName()
                                + " was created successfully, starting VM creation");
                        //create VMs for the user
                        for (int i = 0; i < numVM; i++) {
                            //Create a new VM, add it to the list of user's VMs
                            VirtualMachine myVM = new VirtualMachine(myUser.getUserId());
                            myVM.deployVM(_zoneId, _serviceOfferingId, _templateId, myUser.getDeveloperServer(),
                                    myUser.getApiKey(), myUser.getSecretKey());
                            myUser.getVirtualMachines().add(myVM);
                            singlePrivateIp = myVM.getPrivateIp();

                            if (singlePrivateIp != null) {
                                s_logger.info(
                                        "VM with private Ip " + singlePrivateIp + " was successfully created");
                            } else {
                                s_logger.info("Problems with VM creation for a user" + myUser.getUserName());
                                break;
                            }

                            //get public IP address for the User            
                            myUser.retrievePublicIp(_zoneId);
                            singlePublicIp = myUser.getPublicIp().get(myUser.getPublicIp().size() - 1);
                            if (singlePublicIp != null) {
                                s_logger.info("Successfully got public Ip " + singlePublicIp + " for user "
                                        + myUser.getUserName());
                            } else {
                                s_logger.info("Problems with getting public Ip address for user"
                                        + myUser.getUserName());
                                break;
                            }

                            //create ForwardProxy rules for user's VMs
                            int responseCode = CreateForwardingRule(myUser, singlePrivateIp, singlePublicIp,
                                    "22", "22");
                            if (responseCode == 500)
                                break;
                        }

                        s_logger.info("Deployment successful..." + numVM
                                + " VMs were created. Waiting for 5 min before performance test");
                        Thread.sleep(300000L); // Wait 5 minutes before starting the performance test

                        //Start performance test for the user
                        s_logger.info("Starting performance test for Guest network that has "
                                + myUser.getPublicIp().size() + " public IP addresses");
                        for (int j = 0; j < myUser.getPublicIp().size(); j++) {
                            s_logger.info("Starting test for user which has "
                                    + myUser.getVirtualMachines().size() + " vms. Public IP for the user is "
                                    + myUser.getPublicIp().get(j) + " , number of retries is " + _retry
                                    + " , private IP address of the machine is"
                                    + myUser.getVirtualMachines().get(j).getPrivateIp());
                            guestNetwork myNetwork = new guestNetwork(myUser.getPublicIp().get(j), _retry);
                            myNetwork.setVirtualMachines(myUser.getVirtualMachines());
                            new Thread(myNetwork).start();
                        }

                    }
                } catch (Exception e) {
                    s_logger.error(e);
                }
            }
        }).start();

    }
}

From source file:net.ontopia.topicmaps.cmdlineutils.rdbms.RDBMSIndexTool.java

public static void main(String[] argv) throws Exception {

    // Initialize logging
    CmdlineUtils.initializeLogging();

    // Register logging options
    CmdlineOptions options = new CmdlineOptions("RDBMSIndexTool", argv);
    CmdlineUtils.registerLoggingOptions(options);

    // Parse command line options
    try {
        options.parse();
    } catch (CmdlineOptions.OptionsException e) {
        System.err.println("Error: " + e.getMessage());
        System.exit(1);
    }

    // Get command line arguments
    String[] args = options.getArguments();

    if (args.length != 1) {
        usage();
        System.exit(3);
    }

    // load database schema project
    ClassLoader cloader = RDBMSIndexTool.class.getClassLoader();
    InputStream istream = cloader.getResourceAsStream("net/ontopia/topicmaps/impl/rdbms/config/schema.xml");
    Project dbp = DatabaseProjectReader.loadProject(istream);

    // open database connection
    String propfile = args[0];
    ConnectionFactoryIF cf = new DefaultConnectionFactory(PropertyUtils.loadProperties(new File(propfile)),
            true);

    Connection conn = cf.requestConnection();
    try {
        DatabaseMetaData dbm = conn.getMetaData();
        boolean downcase = dbm.storesLowerCaseIdentifiers();

        Map extra_indexes = new TreeMap();
        Map missing_indexes = new TreeMap();

        Iterator tables = dbp.getTables().iterator();
        while (tables.hasNext()) {
            Table table = (Table) tables.next();
            String table_name = (downcase ? table.getName().toLowerCase() : table.getName());
            //! System.out.println("T :"  + table_name);

            // get primary keys from database
            Map pkeys = getPrimaryKeys(table_name, dbm);

            // get indexes from database
            Map indexes = getIndexes(table_name, dbm);

            Map dindexes = new HashMap();
            if (table.getPrimaryKeys() != null) {
                String pkey = table_name + '(' + StringUtils.join(table.getPrimaryKeys(), ',') + ')';
                if (!pkeys.containsKey(pkey))
                    System.out.println("PKM: " + pkey);
            }

            Iterator iter = table.getIndexes().iterator();
            while (iter.hasNext()) {
                Index index = (Index) iter.next();
                String i = table_name + '(' + StringUtils.join(index.getColumns(), ',') + ')';
                String index_name = (downcase ? index.getName().toLowerCase() : index.getName());
                dindexes.put(i, index_name);
            }

            Set extra = new HashSet(indexes.keySet());
            extra.removeAll(dindexes.keySet());
            extra.removeAll(pkeys.keySet());
            if (!extra.isEmpty()) {
                Iterator i = extra.iterator();
                while (i.hasNext()) {
                    Object k = i.next();
                    extra_indexes.put(k, indexes.get(k));
                }
            }

            Set missing = new HashSet(dindexes.keySet());
            missing.addAll(pkeys.keySet());
            missing.removeAll(indexes.keySet());
            if (!missing.isEmpty()) {
                Iterator i = missing.iterator();
                while (i.hasNext()) {
                    Object k = i.next();
                    missing_indexes.put(k, dindexes.get(k));
                }
            }

        }
        if (!extra_indexes.isEmpty())
            System.out.println("/* --- Extra indexes ----------------------------------------- */");
        Iterator eiter = extra_indexes.keySet().iterator();
        while (eiter.hasNext()) {
            Object k = eiter.next();
            System.out.println("drop index " + extra_indexes.get(k) + "; /* " + k + " */");
        }

        if (!missing_indexes.isEmpty())
            System.out.println("/* --- Missing indexes---------------------------------------- */");
        Iterator miter = missing_indexes.keySet().iterator();
        while (miter.hasNext()) {
            Object k = miter.next();
            System.out.println("create index " + missing_indexes.get(k) + " on " + k + ";");
        }

    } finally {
        conn.rollback();
        conn.close();
    }

}

From source file:com.sludev.mssqlapplylog.MSSQLApplyLogMain.java

public static void main(String[] args) {
    CommandLineParser parser = new DefaultParser();
    Options options = new Options();

    // Most of the following defaults should be changed in
    // the --conf or "conf.properties" file
    String sqlURL = null;
    String sqlUser = null;
    String sqlPass = null;
    String sqlDb = null;
    String sqlHost = "127.0.0.1";
    String backupDirStr = null;
    String laterThanStr = "";
    String fullBackupPathStr = null;
    String fullBackupPatternStr = "(?:[\\w_-]+?)(\\d+)\\.bak";
    String fullBackupDatePatternStr = "yyyyMMddHHmm";
    String sqlProcessUser = null;
    String logBackupPatternStr = "(.*)\\.trn";
    String logBackupDatePatternStr = "yyyyMMddHHmmss";

    boolean doFullRestore = false;
    Boolean useLogFileLastMode = null;
    Boolean monitorLogBackupDir = null;

    options.addOption(Option.builder().longOpt("conf").desc("Configuration file.").hasArg().build());

    options.addOption(Option.builder().longOpt("laterthan").desc("'Later Than' file filter.").hasArg().build());

    options.addOption(Option.builder().longOpt("restore-full")
            .desc("Restore the full backup before continuing.").build());

    options.addOption(Option.builder().longOpt("use-lastmod")
            .desc("Sort/filter the log backups using their File-System 'Last Modified' date.").build());

    options.addOption(Option.builder().longOpt("monitor-backup-dir")
            .desc("Monitor the backup directory for new log backups, and apply them.").build());

    CommandLine line = null;
    try {
        try {
            line = parser.parse(options, args);
        } catch (ParseException ex) {
            throw new MSSQLApplyLogException(String.format("Error parsing command line.'%s'", ex.getMessage()),
                    ex);
        }

        String confFile = null;

        // Process the command line arguments
        Iterator cmdI = line.iterator();
        while (cmdI.hasNext()) {
            Option currOpt = (Option) cmdI.next();
            String currOptName = currOpt.getLongOpt();

            switch (currOptName) {
            case "conf":
                // Parse the configuration file
                confFile = currOpt.getValue();
                break;

            case "laterthan":
                // "Later Than" file date filter
                laterThanStr = currOpt.getValue();
                break;

            case "restore-full":
                // Do a full backup restore before restoring logs
                doFullRestore = true;
                break;

            case "monitor-backup-dir":
                // Monitor the backup directory for new logs
                monitorLogBackupDir = true;
                break;

            case "use-lastmod":
                // Use the last-modified date on Log Backup files for sorting/filtering
                useLogFileLastMode = true;
                break;
            }
        }

        Properties confProperties = null;

        if (StringUtils.isBlank(confFile) || Files.isReadable(Paths.get(confFile)) == false) {
            throw new MSSQLApplyLogException(
                    "Missing or unreadable configuration file.  Please specify --conf");
        } else {
            // Process the conf.properties file
            confProperties = new Properties();
            try {
                confProperties.load(Files.newBufferedReader(Paths.get(confFile)));
            } catch (IOException ex) {
                throw new MSSQLApplyLogException("Error loading properties file", ex);
            }

            sqlURL = confProperties.getProperty("sqlURL", "");
            sqlUser = confProperties.getProperty("sqlUser", "");
            sqlPass = confProperties.getProperty("sqlPass", "");
            sqlDb = confProperties.getProperty("sqlDb", "");
            sqlHost = confProperties.getProperty("sqlHost", "");
            backupDirStr = confProperties.getProperty("backupDir", "");

            if (StringUtils.isBlank(laterThanStr)) {
                laterThanStr = confProperties.getProperty("laterThan", "");
            }

            fullBackupPathStr = confProperties.getProperty("fullBackupPath", fullBackupPathStr);
            fullBackupPatternStr = confProperties.getProperty("fullBackupPattern", fullBackupPatternStr);
            fullBackupDatePatternStr = confProperties.getProperty("fullBackupDatePattern",
                    fullBackupDatePatternStr);
            sqlProcessUser = confProperties.getProperty("sqlProcessUser", "");

            logBackupPatternStr = confProperties.getProperty("logBackupPattern", logBackupPatternStr);
            logBackupDatePatternStr = confProperties.getProperty("logBackupDatePattern",
                    logBackupDatePatternStr);

            if (useLogFileLastMode == null) {
                String useLogFileLastModeStr = confProperties.getProperty("useLogFileLastMode", "false");
                useLogFileLastMode = Boolean
                        .valueOf(StringUtils.lowerCase(StringUtils.trim(useLogFileLastModeStr)));
            }

            if (monitorLogBackupDir == null) {
                String monitorBackupDirStr = confProperties.getProperty("monitorBackupDir", "false");
                monitorLogBackupDir = Boolean
                        .valueOf(StringUtils.lowerCase(StringUtils.trim(monitorBackupDirStr)));
            }
        }
    } catch (MSSQLApplyLogException ex) {
        try (StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw)) {
            pw.append(String.format("Error : '%s'\n\n", ex.getMessage()));

            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp(pw, 80, "\njava -jar mssqlapplylog.jar ",
                    "\nThe MSSQLApplyLog application can be used in a variety of options and modes.\n", options,
                    0, 2, " All Rights Reserved.", true);

            System.out.println(sw.toString());
        } catch (IOException iex) {
            LOGGER.debug("Error processing usage", iex);
        }

        System.exit(1);
    }

    MSSQLApplyLogConfig config = MSSQLApplyLogConfig.from(backupDirStr, fullBackupPathStr,
            fullBackupDatePatternStr, laterThanStr, fullBackupPatternStr, logBackupPatternStr,
            logBackupDatePatternStr, sqlHost, sqlDb, sqlUser, sqlPass, sqlURL, sqlProcessUser,
            useLogFileLastMode, doFullRestore, monitorLogBackupDir);

    MSSQLApplyLog logProc = MSSQLApplyLog.from(config);

    BasicThreadFactory thFactory = new BasicThreadFactory.Builder().namingPattern("restoreThread-%d").build();

    ExecutorService mainThreadExe = Executors.newSingleThreadExecutor(thFactory);

    Future<Integer> currRunTask = mainThreadExe.submit(logProc);

    mainThreadExe.shutdown();

    Integer resp = 0;
    try {
        resp = currRunTask.get();
    } catch (InterruptedException ex) {
        LOGGER.error("Application 'main' thread was interrupted", ex);
    } catch (ExecutionException ex) {
        LOGGER.error("Application 'main' thread execution error", ex);
    } finally {
        // If main leaves for any reason, shutdown all threads
        mainThreadExe.shutdownNow();
    }

    System.exit(resp);
}

From source file:at.tlphotography.jAbuseReport.Reporter.java

/**
 * The main method.
 *
 * @param args
 *          the arguments
 */
public static void main(String[] args) {
    parseArguments(args);

    File[] directory = new File(logDir).listFiles(); // get the files in the dir

    for (File file : directory) // iterate over the files
    {
        if (!file.isDirectory() && file.getName().contains(logNames)) // if the file is not a dir and the name contains the logName string
        {
            if (file.getName().endsWith(".gz")) // is it zipped?
            {
                content.putAll(readGZFile(file));
            } else {
                content.putAll(readLogFile(file));
            }
        }
    }

    // save the mails to the log lines
    HashMap<String, ArrayList<LogObject>> finalContent = new HashMap<>();

    Iterator<Entry<String, String>> it = content.entrySet().iterator();
    while (it.hasNext()) {
        Map.Entry<String, String> pair = it.next();
        String mail = whoIsLookUp(pair.getKey());

        if (finalContent.containsKey(mail)) {
            finalContent.get(mail).add(new LogObject(pair.getValue()));
        } else {
            ArrayList<LogObject> temp = new ArrayList<LogObject>();
            temp.add(new LogObject(pair.getValue()));
            finalContent.put(mail, temp);
        }

        it.remove();
    }

    // sort them
    Iterator<Entry<String, ArrayList<LogObject>>> it2 = finalContent.entrySet().iterator();
    while (it2.hasNext()) {
        Entry<String, ArrayList<LogObject>> pair = it2.next();
        Collections.sort(pair.getValue());
        println(pair.getKey() + " =");
        for (LogObject obj : pair.getValue()) {
            println(obj.logContent);
        }

        println("\n");
        it2.remove();
    }

}