Example usage for java.io.IOException.printStackTrace()

Introduction

On this page you can find usage examples for java.io.IOException.printStackTrace().

Prototype

public void printStackTrace() 

Document

Prints this throwable and its backtrace to the standard error stream.
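
Before the full examples below, here is a minimal, self-contained sketch of the typical call pattern (the file name is made up for illustration): an I/O operation fails inside a try block, and the catch block prints the exception and its backtrace to standard error.

import java.io.FileReader;
import java.io.IOException;

public class PrintStackTraceExample {
    public static void main(String[] args) {
        try (FileReader reader = new FileReader("missing-file.txt")) {
            // Read a single character; any failure surfaces as an IOException.
            reader.read();
        } catch (IOException e) {
            // Prints the exception and its backtrace to System.err.
            e.printStackTrace();
        }
    }
}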

Usage

From source file:it.isislab.dmason.util.SystemManagement.Worker.thrower.DMasonWorker.java

public static void main(String[] args) {
    RuntimeMXBean bean = ManagementFactory.getRuntimeMXBean();

    // Get name representing the running Java virtual machine.
    // It returns something like 6460@AURORA. Where the value
    // before the @ symbol is the PID.
    //
    String jvmName = bean.getName();

    //Used for log4j properties
    System.setProperty("logfile.name", "worker" + jvmName);

    //Used for log4j properties
    System.setProperty("steplog.name", "workerStep" + jvmName);

    DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd_HH_mm_ss_SS");
    Date date = new Date();
    dateFormat.format(date);

    System.setProperty("timestamp", date.toLocaleString());

    System.setProperty("paramsfile.name", "params");
    try {
        File logPath = new File("Logs/workers");
        if (logPath.exists())
            FileUtils.cleanDirectory(logPath);
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    logger = Logger.getLogger(DMasonWorker.class.getCanonicalName());
    logger.debug("StartWorker " + version);

    autoStart = false;
    connect = false;
    ip = null;
    port = null;
    String topic = "";
    updated = false;
    isBatch = false;
    topicPrefix = "";

    if (args.length == 0) {
        // Force waiting for beacon (requires ActiveMQWrapper)
        autoStart = false;
        connect = true;
    } else if (args.length == 2) {
        // Launched with IP and Port
        ip = args[0];
        port = args[1];
        autoStart = true;
        connect = true;
    } else if (args.length == 4) {
        // Used by D-Mason in order to restart a 
        // worker after update, batch execution, reset
        autoStart = true;
        ip = args[0];
        port = args[1];
        topic = args[2];
        if (args[3].equals("update")) {
            updated = true;
        }
        if (args[3].equals("reset")) {
            updated = false;
            isBatch = false;
        }
        if (args[3].contains("Batch")) {
            updated = false;
            isBatch = true;
            topicPrefix = args[3];
        }
    } else {
        System.out.println("Usage: StartWorker IP PORT");
    }

    DMasonWorker worker = new DMasonWorker(ip, port, topic);

    boolean connected = worker.startConnection();

    if (connected) {
        logger.debug("CONNECTED:");
        logger.debug("   IP     : " + worker.ipAddress.getIPaddress());
        logger.debug("   Port   : " + worker.ipAddress.getPort());
        logger.debug("   Prefix : " + DMasonWorker.topicPrefix);
        logger.debug("   Topic  : " + worker.myTopic);
    } else {
        logger.info("CONNECTION FAILED:");
        logger.debug("   IP     : " + worker.ipAddress.getIPaddress());
        logger.debug("   Port   : " + worker.ipAddress.getPort());
        logger.debug("   Prefix : " + DMasonWorker.topicPrefix);
        logger.debug("   Topic  : " + worker.myTopic);
    }
}

From source file:com.bluexml.tools.miscellaneous.PrepareSIDEModulesMigration.java

/**
 * @param args
 */
public static void main(String[] args) {
    boolean inplace = false;

    String workspace = "/Users/davidabad/workspaces/SIDE-Modules/";
    String frameworkmodulesPath = "/Volumes/Data/SVN/side/HEAD/S-IDE/FrameworksModules/trunk/";
    String classifier_base = "enterprise";
    String version_base = "3.4.6";
    String classifier_target = "enterprise";
    String version_target = "3.4.11";
    String frameworkmodulesInplace = "/Volumes/Data/SVN/projects/Ifremer/IfremerV5/src/modules/mavenProjects";

    Properties props = new Properties();
    try {
        InputStream resourceAsStream = PrepareSIDEModulesMigration.class
                .getResourceAsStream("config.properties");
        if (resourceAsStream != null) {
            props.load(resourceAsStream);

            inplace = Boolean.parseBoolean(props.getProperty("inplace", Boolean.toString(inplace)));
            workspace = props.getProperty("workspace", workspace);
            frameworkmodulesPath = props.getProperty("frameworkmodulesPath", frameworkmodulesPath);
            classifier_base = props.getProperty("classifier_base", classifier_base);
            version_base = props.getProperty("version_base", version_base);
            classifier_target = props.getProperty("classifier_target", classifier_target);
            version_target = props.getProperty("version_target", version_target);
            frameworkmodulesInplace = props.getProperty("frameworkmodulesInplace", frameworkmodulesInplace);
        } else {
            System.out.println("no configuration founded in classpath config.properties");
        }

    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        return;
    }

    System.out.println("properties :");
    Enumeration<?> propertyNames = props.propertyNames();
    while (propertyNames.hasMoreElements()) {
        String nextElement = propertyNames.nextElement().toString();
        System.out.println("\t " + nextElement + " : " + props.getProperty(nextElement));
    }

    File workspaceFile = new File(workspace);

    File targetHome = new File(workspaceFile, MIGRATION_FOLDER);
    if (targetHome.exists()) {
        try {
            FileUtils.deleteDirectory(targetHome);
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            throw new RuntimeException(e);
        }
    }

    final String versionInProjectName = getVersionInProjectName(classifier_base, version_base);
    String versionInProjectName2 = getVersionInProjectName(classifier_target, version_target);

    if (frameworkmodulesPath.contains(",")) {
        // this is a list of paths
        String[] split = frameworkmodulesPath.split(",");
        for (String string : split) {
            if (StringUtils.trimToNull(string) != null) {
                executeInpath(inplace, string, classifier_base, version_base, classifier_target, version_target,
                        frameworkmodulesInplace, workspaceFile, versionInProjectName, versionInProjectName2);
            }
        }
    } else {
        executeInpath(inplace, frameworkmodulesPath, classifier_base, version_base, classifier_target,
                version_target, frameworkmodulesInplace, workspaceFile, versionInProjectName,
                versionInProjectName2);
    }

    System.out.println("Job's done !");
    System.out.println("Please check " + MIGRATION_FOLDER);
    System.out.println(
            "If all is ok you can use commit.sh in a terminal do : cd " + MIGRATION_FOLDER + "; sh commit.sh");
    System.out.println(
            "This script will create new svn projet and commit resources, add 'target' to svn:ignore ...");

}

From source file:io.anserini.index.UserPostFrequencyDistribution.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(new Option(HELP_OPTION, "show help"));

    options.addOption(new Option(STORE_TERM_VECTORS_OPTION, "store term vectors"));

    options.addOption(OptionBuilder.withArgName("collection").hasArg()
            .withDescription("source collection directory").create(COLLECTION_OPTION));
    options.addOption(OptionBuilder.withArgName("property").hasArg()
            .withDescription("source collection directory").create("property"));
    options.addOption(OptionBuilder.withArgName("collection_pattern").hasArg()
            .withDescription("source collection directory").create("collection_pattern"));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (cmdline.hasOption(HELP_OPTION) || !cmdline.hasOption(COLLECTION_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(UserPostFrequencyDistribution.class.getName(), options);
        System.exit(-1);
    }

    String collectionPath = cmdline.getOptionValue(COLLECTION_OPTION);

    final FieldType textOptions = new FieldType();
    textOptions.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
    textOptions.setStored(true);
    textOptions.setTokenized(true);
    textOptions.setStoreTermVectors(true);

    LOG.info("collection: " + collectionPath);
    LOG.info("collection_pattern " + cmdline.getOptionValue("collection_pattern"));
    LOG.info("property " + cmdline.getOptionValue("property"));
    LongOpenHashSet deletes = null;

    long startTime = System.currentTimeMillis();
    File file = new File(collectionPath);
    if (!file.exists()) {
        System.err.println("Error: " + file + " does not exist!");
        System.exit(-1);
    }

    final JsonStatusCorpusReader stream = new JsonStatusCorpusReader(file,
            cmdline.getOptionValue("collection_pattern"));

    Runtime.getRuntime().addShutdownHook(new Thread() {
        public void run() {

            try {

                stream.close();
            } catch (IOException e1) {
                // TODO Auto-generated catch block
                e1.printStackTrace();
            }

            System.out.println("# of users indexed this round: " + userIndexedCount);

            System.out.println("Shutting down");

        }
    });
    Status status;
    boolean readerNotInitialized = true;

    try {
        Properties prop = new Properties();
        while ((status = stream.next()) != null) {

            // try{
            // status = DataObjectFactory.createStatus(s);
            // if (status==null||status.getText() == null) {
            // continue;
            // }}catch(Exception e){
            //
            // }
            //

            boolean pittsburghRelated = false;
            try {

                if (Math.abs(status.getLongitude() - pittsburghLongitude) < 0.05d
                        && Math.abs(status.getlatitude() - pittsburghLatitude) < 0.05d)
                    pittsburghRelated = true;
            } catch (Exception e) {

            }
            try {
                if (status.getPlace().contains("Pittsburgh, PA"))
                    pittsburghRelated = true;
            } catch (Exception e) {

            }
            try {
                if (status.getUserLocation().contains("Pittsburgh, PA"))
                    pittsburghRelated = true;
            } catch (Exception e) {

            }

            try {
                if (status.getText().contains("Pittsburgh"))
                    pittsburghRelated = true;
            } catch (Exception e) {

            }

            if (pittsburghRelated) {

                int previousPostCount = 0;

                if (prop.containsKey(String.valueOf(status.getUserid()))) {
                    previousPostCount = Integer
                            .valueOf(prop.getProperty(String.valueOf(status.getUserid())).split(" ")[1]);
                }

                prop.setProperty(String.valueOf(status.getUserid()),
                        String.valueOf(status.getStatusesCount()) + " " + (1 + previousPostCount));
                if (prop.size() > 0 && prop.size() % 1000 == 0) {
                    Runtime runtime = Runtime.getRuntime();
                    runtime.gc();
                    System.out.println("Property size " + prop.size() + "Memory used:  "
                            + ((runtime.totalMemory() - runtime.freeMemory()) / (1024L * 1024L)) + " MB\n");
                }
                OutputStream output = new FileOutputStream(cmdline.getOptionValue("property"), false);
                prop.store(output, null);
                output.close();

            }
        }
        //         prop.store(output, null);
        LOG.info(String.format("Total of %s statuses added", userIndexedCount));
        LOG.info("Total elapsed time: " + (System.currentTimeMillis() - startTime) + "ms");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {

        stream.close();
    }
}

From source file:ZipExploder.java

/**
 * Main command line entry point.
 * 
 * @param args
 */
public static void main(final String[] args) {
    if (args.length == 0) {
        printHelp();
        System.exit(0);
    }
    List zipNames = new ArrayList();
    List jarNames = new ArrayList();
    String destDir = null;
    boolean jarActive = false, zipActive = false, destDirActive = false;
    boolean verbose = false;
    // process arguments
    for (int i = 0; i < args.length; i++) {
        String arg = args[i];
        if (arg.charAt(0) == '-') { // switch
            arg = arg.substring(1);
            if (arg.equalsIgnoreCase("jar")) {
                jarActive = true;
                zipActive = false;
                destDirActive = false;
            } else if (arg.equalsIgnoreCase("zip")) {
                zipActive = true;
                jarActive = false;
                destDirActive = false;
            } else if (arg.equalsIgnoreCase("dir")) {
                jarActive = false;
                zipActive = false;
                destDirActive = true;
            } else if (arg.equalsIgnoreCase("verbose")) {
                verbose = true;
            } else {
                reportError("Invalid switch - " + arg);
            }
        } else {
            if (jarActive) {
                jarNames.add(arg);
            } else if (zipActive) {
                zipNames.add(arg);
            } else if (destDirActive) {
                if (destDir != null) {
                    reportError("duplicate argument - " + "-destDir");
                }
                destDir = arg;
            } else {
                reportError("Too many parameters - " + arg);
            }
        }
    }
    if (destDir == null || (zipNames.size() + jarNames.size()) == 0) {
        reportError("Missing parameters");
    }
    if (verbose) {
        System.out.println("Effective command: " + ZipExploder.class.getName() + " "
                + (jarNames.size() > 0 ? "-jars " + jarNames + " " : "")
                + (zipNames.size() > 0 ? "-zips " + zipNames + " " : "") + "-dir " + destDir);
    }
    try {
        ZipExploder ze = new ZipExploder(verbose);
        ze.process((String[]) zipNames.toArray(new String[zipNames.size()]),
                (String[]) jarNames.toArray(new String[jarNames.size()]), destDir);
    } catch (IOException ioe) {
        System.err.println("Exception - " + ioe.getMessage());
        ioe.printStackTrace(); // *** debug ***
        System.exit(2);
    }
}

From source file:com.mywork.framework.util.RemoteHttpUtil.java

public static void main(String[] args) {
    try {

        // json
        {
            JsonObject bodyJson = new JsonObject();
            bodyJson.addProperty("uid", "testusername");
            bodyJson.addProperty("pic_file", "rcGysFG2xqLaO8tlvW4rFVmVqlnx+4qGewYs8a+enmoZ");
            String getResponse = RemoteHttpUtil
                    .fetchJsonHttpResponse("http://10.48.26.196:8080/face/json/postjson", null, bodyJson);
            System.out.print(getResponse);
        }

        // // get
        // {
        // String getResponse =
        // RemoteHttpUtil.fetchSimpleHttpResponse("get", "http://10.48.26.196:8080/face/json/getData",
        // null, null);
        // System.out.print(getResponse);
        // }
        //
        // // post
        // {
        // Map<String, String> bodyMap = new HashMap<String, String>();
        // bodyMap.put("aaa", "goodgod");
        // String postResponse =
        // RemoteHttpUtil.fetchSimpleHttpResponse("post", "http://10.48.26.196:8080/face/json/postData",
        // null, bodyMap);
        // System.out.print(postResponse);
        // }
        //
        // // form
        // {
        // Map<String, ContentBody> bodyMap = new HashMap<String, ContentBody>();
        // bodyMap.put("name", new StringBody("heiheiheihei", ContentType.TEXT_PLAIN));
        // bodyMap.put("file2", new FileBody(new File("D:\\temp\\test.txt")));
        // bodyMap.put("file3", new FileBody(new File("D:\\temp\\test2.txt")));
        // String multiHttpResponse =
        // RemoteHttpUtil.fetchMultipartHttpResponse("http://10.48.26.196:8080/face/file/uploadMultifile",
        // null, bodyMap);
        // System.out.print(multiHttpResponse);
        // }

    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

}

From source file:evaluation.evaluation1VMPolicyGeneration.java

public static void main(String[] args) {

    int VMNumber = 5;
    int attributeNumber = 20;

    JSONObject obj = new JSONObject();
    obj.put("name", "clientTemplate");
    obj.put("context", "VM-deployment");
    //obj.put("Context", new Integer);

    HashMap serviceRequirement = new HashMap();

    HashMap serviceDescription = new HashMap();
    serviceRequirement.put("VM1_volume", "1_GB");
    serviceDescription.put("VM1_purpose", "dev");
    serviceDescription.put("VM1_data", "private");
    serviceDescription.put("VM1_application", "internal");

    for (int j = 5; j < attributeNumber; j++) {
        serviceDescription.put("VM1_other" + j, "other");
    }

    serviceRequirement.put("VM2_volume", "2_GB");
    serviceDescription.put("VM2_purpose", "prod");
    serviceDescription.put("VM2_data", "public");
    serviceDescription.put("VM2_application", "business");

    for (int j = 5; j < attributeNumber; j++) {
        serviceDescription.put("VM2_other" + j, "other");
    }

    serviceRequirement.put("VM3_volume", "1_GB");
    serviceDescription.put("VM3_purpose", "test");
    serviceDescription.put("VM3_data", "public");
    serviceDescription.put("VM3_application", "business");

    for (int j = 5; j < attributeNumber; j++) {
        serviceDescription.put("VM3_other" + j, "other");
    }

    serviceRequirement.put("VM4_volume", "12_GB");
    serviceDescription.put("VM4_purpose", "prod");
    serviceDescription.put("VM4_data", "public");
    serviceDescription.put("VM4_application", "business");

    for (int j = 5; j < attributeNumber; j++) {
        serviceDescription.put("VM4_other" + j, "other");
    }

    for (int i = 5; i < VMNumber; i++) {
        serviceRequirement.put("VM" + i + "_volume", "20_GB");
        serviceDescription.put("VM" + i + "_purpose", "prod");
        serviceDescription.put("VM" + i + "_data", "public");
        serviceDescription.put("VM" + i + "_application", "business");
        for (int j = 5; j < attributeNumber; j++) {
            serviceDescription.put("VM" + i + "_other" + j, "other");
        }

    }

    obj.put("serviceRequirement", serviceRequirement);
    obj.put("serviceDescription", serviceDescription);

    HashMap gauranteeTerm = new HashMap();
    gauranteeTerm.put("VM1_availability", "more_97_percentage");
    gauranteeTerm.put("VM2_availability", "more_99_percentage");
    gauranteeTerm.put("VM3_availability", "more_95_percentage");
    gauranteeTerm.put("VM4_availability", "more_99_percentage");
    obj.put("gauranteeTerm", gauranteeTerm);

    //Constraint1

    HashMap host_rule1 = new HashMap();
    HashMap VM_rule1 = new HashMap();
    host_rule1.put("certificate", "true");
    VM_rule1.put("purpose", "dev");

    ArrayList rule1 = new ArrayList();
    rule1.add("permission");
    rule1.add(host_rule1);
    rule1.add(VM_rule1);

    HashMap host_rule1_2 = new HashMap();
    HashMap VM_rule1_2 = new HashMap();
    host_rule1_2.put("certificate", "true");
    VM_rule1_2.put("purpose", "prod");

    ArrayList rule1_2 = new ArrayList();
    rule1_2.add("permission");
    rule1_2.add(host_rule1_2);
    rule1_2.add(VM_rule1_2);

    HashMap host_rule1_3 = new HashMap();
    HashMap VM_rule1_3 = new HashMap();
    host_rule1_3.put("certificate", "true");
    VM_rule1_3.put("purpose", "test");

    ArrayList rule1_3 = new ArrayList();
    rule1_3.add("permission");
    rule1_3.add(host_rule1_3);
    rule1_3.add(VM_rule1_3);

    HashMap host_rule2 = new HashMap();
    HashMap VM_rule2 = new HashMap();
    host_rule2.put("location", "France");
    VM_rule2.put("ID", "VM2");

    ArrayList rule2 = new ArrayList();
    rule2.add("permission");
    rule2.add(host_rule2);
    rule2.add(VM_rule2);

    HashMap host_rule2_1 = new HashMap();
    HashMap VM_rule2_1 = new HashMap();
    host_rule2_1.put("location", "UK");
    VM_rule2_1.put("ID", "VM2");

    ArrayList rule2_1 = new ArrayList();
    rule2_1.add("permission");
    rule2_1.add(host_rule2_1);
    rule2_1.add(VM_rule2_1);

    HashMap host_rule3 = new HashMap();
    HashMap VM_rule3 = new HashMap();
    host_rule3.put("location", "France");
    VM_rule3.put("application", "business");

    ArrayList rule3 = new ArrayList();
    rule3.add("permission");
    rule3.add(host_rule3);
    rule3.add(VM_rule3);

    HashMap host_rule3_1 = new HashMap();
    HashMap VM_rule3_1 = new HashMap();
    host_rule3_1.put("location", "UK");
    VM_rule3_1.put("application", "business");

    ArrayList rule3_1 = new ArrayList();
    rule3_1.add("permission");
    rule3_1.add(host_rule3_1);
    rule3_1.add(VM_rule3_1);

    HashMap VMSeperation_rule_1_1 = new HashMap();
    HashMap VMSeperation_rule_1_2 = new HashMap();

    VMSeperation_rule_1_1.put("ID", "VM1");
    VMSeperation_rule_1_2.put("ID", "VM3");

    ArrayList rule4 = new ArrayList();
    rule4.add("separation");
    rule4.add(VMSeperation_rule_1_1);
    rule4.add(VMSeperation_rule_1_2);

    ArrayList policyInConstraint1 = new ArrayList();
    policyInConstraint1.add(rule1);
    policyInConstraint1.add(rule1_2);
    policyInConstraint1.add(rule1_3);

    policyInConstraint1.add(rule2);
    policyInConstraint1.add(rule2_1);

    policyInConstraint1.add(rule3);
    policyInConstraint1.add(rule3_1);

    policyInConstraint1.add(rule4);

    ArrayList creationConstraint1 = new ArrayList();
    creationConstraint1.add("RP4");
    creationConstraint1.add("true");
    creationConstraint1.add("true");
    creationConstraint1.add(policyInConstraint1);

    ArrayList totalConstraint = new ArrayList();
    totalConstraint.add(creationConstraint1);

    obj.put("creationConstraint", totalConstraint);

    try {

        FileWriter file = new FileWriter("confClient" + File.separator + "test3.json");
        file.write(obj.toJSONString());
        file.flush();
        file.close();

    } catch (IOException e) {
        e.printStackTrace();
    }

    System.out.print(obj);

    /*
            
    JSONParser parser = new JSONParser();
            
    try {
            
    Object obj2 = parser.parse(new FileReader("test2.json"));
            
    JSONObject jsonObject = (JSONObject) obj2;
            
        HashMap serviceDescription2=(HashMap) jsonObject.get("serviceDescription");
                 
        method.printHashMap(serviceDescription2);
                
                
        HashMap gauranteeTerm2=(HashMap) jsonObject.get("gauranteeTerm");
                 
        method.printHashMap(gauranteeTerm2);
                
                
                
        ArrayList creationConstraint=(ArrayList) jsonObject.get("creationConstraint");
                
        method.printArrayList(creationConstraint);
            
            
    } catch (FileNotFoundException e) {
    e.printStackTrace();
    } catch (IOException e) {
    e.printStackTrace();
    } catch (ParseException e) {
    e.printStackTrace();
    }
            
            
            
            
            
    */

}

From source file:example.ConfigurationsExample.java

public static void main(String[] args) {
    String jdbcPropToLoad = "prod.properties";
    CommandLineParser parser = new PosixParser();
    Options options = new Options();
    options.addOption("d", "dev", false,
            "Dev tag to launch app in dev mode. Means that app will launch embedded mckoi db.");
    try {
        CommandLine line = parser.parse(options, args);
        if (line.hasOption("d")) {
            System.err.println("App is in DEV mode");
            jdbcPropToLoad = "dev.properties";
        }
    } catch (ParseException exp) {
        System.err.println("Parsing failed.  Reason: " + exp.getMessage());
    }
    Properties p = new Properties();
    try {
        p.load(ConfigurationsExample.class.getResourceAsStream("/" + jdbcPropToLoad));
    } catch (IOException e) {
        System.err.println("Properties loading failed.  Reason: " + e.getMessage());
    }
    try {
        String clazz = p.getProperty("driver.class");
        Class.forName(clazz);
        System.out.println(" Jdbc driver loaded :" + clazz);
    } catch (ClassNotFoundException e) {
        System.err.println("Jdbc Driver class loading failed.  Reason: " + e.getMessage());
        e.printStackTrace();
    }

}

From source file:ivory.ltr.GreedyLearn.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws InterruptedException, ExecutionException {
    Options options = new Options();

    options.addOption(OptionBuilder.withArgName("input").hasArg()
            .withDescription("Input file that contains training instances.").isRequired().create("input"));
    options.addOption(OptionBuilder.withArgName("model").hasArg().withDescription("Model file to create.")
            .isRequired().create("model"));
    options.addOption(OptionBuilder.withArgName("numModels").hasArg()
            .withDescription("Number of models to consider each iteration (default=1).").create("numModels"));
    options.addOption(OptionBuilder.withArgName("className").hasArg()
            .withDescription("Java class name of metric to optimize for (default=ivory.ltr.NDCGMeasure)")
            .create("metric"));
    options.addOption(OptionBuilder.withArgName("threshold").hasArg()
            .withDescription("Feature correlation threshold for pruning (disabled by default).")
            .create("pruneCorrelated"));
    options.addOption(OptionBuilder.withArgName("log").withDescription("Include log features (default=false).")
            .create("log"));
    options.addOption(OptionBuilder.withArgName("product")
            .withDescription("Include product features (default=false).").create("product"));
    options.addOption(OptionBuilder.withArgName("quotient")
            .withDescription("Include quotient features (default=false).").create("quotient"));
    options.addOption(OptionBuilder.withArgName("numThreads").hasArg()
            .withDescription("Number of threads to utilize (default=1).").create("numThreads"));

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new GnuParser();

    String trainFile = null;
    String modelOutputFile = null;

    int numModels = 1;

    String metricClassName = "ivory.ltr.NDCGMeasure";

    boolean pruneCorrelated = false;
    double correlationThreshold = 1.0;

    boolean logFeatures = false;
    boolean productFeatures = false;
    boolean quotientFeatures = false;

    int numThreads = 1;

    // parse the command-line arguments
    try {
        CommandLine line = parser.parse(options, args);

        if (line.hasOption("input")) {
            trainFile = line.getOptionValue("input");
        }

        if (line.hasOption("model")) {
            modelOutputFile = line.getOptionValue("model");
        }

        if (line.hasOption("numModels")) {
            numModels = Integer.parseInt(line.getOptionValue("numModels"));
        }

        if (line.hasOption("metric")) {
            metricClassName = line.getOptionValue("metric");
        }

        if (line.hasOption("pruneCorrelated")) {
            pruneCorrelated = true;
            correlationThreshold = Double.parseDouble(line.getOptionValue("pruneCorrelated"));
        }

        if (line.hasOption("numThreads")) {
            numThreads = Integer.parseInt(line.getOptionValue("numThreads"));
        }

        if (line.hasOption("log")) {
            logFeatures = true;
        }

        if (line.hasOption("product")) {
            productFeatures = true;
        }

        if (line.hasOption("quotient")) {
            quotientFeatures = true;
        }
    } catch (ParseException exp) {
        System.err.println(exp.getMessage());
    }

    // were all of the required parameters specified?
    if (trainFile == null || modelOutputFile == null) {
        formatter.printHelp("GreedyLearn", options, true);
        System.exit(-1);
    }

    // learn the model
    try {
        GreedyLearn learn = new GreedyLearn();
        learn.train(trainFile, modelOutputFile, numModels, metricClassName, pruneCorrelated,
                correlationThreshold, logFeatures, productFeatures, quotientFeatures, numThreads);
    } catch (IOException e) {
        e.printStackTrace();
    } catch (ConfigurationException e) {
        e.printStackTrace();
    } catch (InstantiationException e) {
        e.printStackTrace();
    } catch (IllegalAccessException e) {
        e.printStackTrace();
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    }
}

From source file:nab.detectors.htmjava.HTMModel.java

/**
 * Launch htm.java NAB detector
 *
 * Usage:
 *      As a standalone application (for debug purpose only):
 *
 *          java -jar htm.java-nab.jar "{\"modelParams\":{....}}" < nab_data.csv > anomalies.out
 *
 *      For complete list of command line options use:
 *
 *          java -jar htm.java-nab.jar --help
 *
 *      As a NAB detector (see 'htmjava_detector.py'):
 *
 *          python run.py --detect --score --normalize -d htmjava
 *
 *      Logging options, see "log4j.properties":
 *
 *          - "LOGLEVEL": Controls log output (default: "OFF")
 *          - "LOGGER": Either "CONSOLE" or "FILE" (default: "CONSOLE")
 *          - "LOGFILE": Log file destination (default: "htmjava.log")
 *
 *      For example:
 *
 *          java -DLOGLEVEL=TRACE -DLOGGER=FILE -jar htm.java-nab.jar "{\"modelParams\":{....}}" < nab_data.csv > anomalies.out
 *
 */
@SuppressWarnings("resource")
public static void main(String[] args) {
    try {
        LOGGER.trace("main({})", Arrays.asList(args));
        // Parse command line args
        OptionParser parser = new OptionParser();
        parser.nonOptions("OPF parameters object (JSON)");
        parser.acceptsAll(Arrays.asList("p", "params"),
                "OPF parameters file (JSON).\n(default: first non-option argument)").withOptionalArg()
                .ofType(File.class);
        parser.acceptsAll(Arrays.asList("i", "input"), "Input data file (csv).\n(default: stdin)")
                .withOptionalArg().ofType(File.class);
        parser.acceptsAll(Arrays.asList("o", "output"), "Output results file (csv).\n(default: stdout)")
                .withOptionalArg().ofType(File.class);
        parser.acceptsAll(Arrays.asList("s", "skip"), "Header lines to skip").withOptionalArg()
                .ofType(Integer.class).defaultsTo(0);
        parser.acceptsAll(Arrays.asList("h", "?", "help"), "Help");
        OptionSet options = parser.parse(args);
        if (args.length == 0 || options.has("h")) {
            parser.printHelpOn(System.out);
            return;
        }

        // Get in/out files
        final PrintStream output;
        final InputStream input;
        if (options.has("i")) {
            input = new FileInputStream((File) options.valueOf("i"));
        } else {
            input = System.in;
        }
        if (options.has("o")) {
            output = new PrintStream((File) options.valueOf("o"));
        } else {
            output = System.out;
        }

        // Parse OPF Model Parameters
        JsonNode params;
        ObjectMapper mapper = new ObjectMapper();
        if (options.has("p")) {
            params = mapper.readTree((File) options.valueOf("p"));
        } else if (options.nonOptionArguments().isEmpty()) {
            try {
                input.close();
            } catch (Exception ignore) {
            }
            if (options.has("o")) {
                try {
                    output.flush();
                    output.close();
                } catch (Exception ignore) {
                }
            }
            throw new IllegalArgumentException("Expecting OPF parameters. See 'help' for more information");
        } else {
            params = mapper.readTree((String) options.nonOptionArguments().get(0));
        }

        // Number of header lines to skip
        int skip = (int) options.valueOf("s");

        // Force timezone to UTC
        DateTimeZone.setDefault(DateTimeZone.UTC);

        // Create NAB Network Model
        HTMModel model = new HTMModel(params);
        Network network = model.getNetwork();
        network.observe().subscribe((inference) -> {
            double score = inference.getAnomalyScore();
            int record = inference.getRecordNum();
            LOGGER.trace("record = {}, score = {}", record, score);
            // Output raw anomaly score
            output.println(score);
        }, (error) -> {
            LOGGER.error("Error processing data", error);
        }, () -> {
            LOGGER.trace("Done processing data");
            if (LOGGER.isDebugEnabled()) {
                model.showDebugInfo();
            }
        });
        network.start();

        // Pipe data to network
        Publisher publisher = model.getPublisher();
        BufferedReader in = new BufferedReader(new InputStreamReader(input));
        String line;
        while ((line = in.readLine()) != null && line.trim().length() > 0) {
            // Skip header lines
            if (skip > 0) {
                skip--;
                continue;
            }
            publisher.onNext(line);
        }
        publisher.onComplete();
        in.close();
        LOGGER.trace("Done publishing data");
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:io.anserini.index.IndexTweetsUpdatePlace.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(new Option(HELP_OPTION, "show help"));
    options.addOption(new Option(OPTIMIZE_OPTION, "merge indexes into a single segment"));
    options.addOption(new Option(STORE_TERM_VECTORS_OPTION, "store term vectors"));

    options.addOption(OptionBuilder.withArgName("collection").hasArg()
            .withDescription("source collection directory").create(COLLECTION_OPTION));
    options.addOption(
            OptionBuilder.withArgName("dir").hasArg().withDescription("index location").create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("file").hasArg().withDescription("file with deleted tweetids")
            .create(DELETES_OPTION));
    options.addOption(OptionBuilder.withArgName("id").hasArg().withDescription("max id").create(MAX_ID_OPTION));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (cmdline.hasOption(HELP_OPTION) || !cmdline.hasOption(COLLECTION_OPTION)
            || !cmdline.hasOption(INDEX_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(IndexTweetsUpdatePlace.class.getName(), options);
        System.exit(-1);
    }

    String collectionPath = cmdline.getOptionValue(COLLECTION_OPTION);
    String indexPath = cmdline.getOptionValue(INDEX_OPTION);

    System.out.println(collectionPath + " " + indexPath);

    LOG.info("collection: " + collectionPath);
    LOG.info("index: " + indexPath);

    long startTime = System.currentTimeMillis();
    File file = new File(collectionPath);
    if (!file.exists()) {
        System.err.println("Error: " + file + " does not exist!");
        System.exit(-1);
    }

    final FieldType textOptions = new FieldType();
    textOptions.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
    textOptions.setStored(true);
    textOptions.setTokenized(true);
    if (cmdline.hasOption(STORE_TERM_VECTORS_OPTION)) {
        textOptions.setStoreTermVectors(true);

    }

    final StatusStream stream = new JsonStatusCorpusReader(file);

    final Directory dir = new SimpleFSDirectory(Paths.get(cmdline.getOptionValue(INDEX_OPTION)));
    final IndexWriterConfig config = new IndexWriterConfig(ANALYZER);

    config.setOpenMode(IndexWriterConfig.OpenMode.APPEND);

    final IndexWriter writer = new IndexWriter(dir, config);
    System.out.print("Original # of docs " + writer.numDocs());
    int updateCount = 0;

    Runtime.getRuntime().addShutdownHook(new Thread() {
        public void run() {

            try {
                stream.close();
            } catch (IOException e1) {
                // TODO Auto-generated catch block
                e1.printStackTrace();
            }

            try {
                writer.close();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            try {
                dir.close();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            System.out.println("Shutting down");

        }
    });
    int cnt = 0;
    Status status;
    try {
        while ((status = stream.next()) != null) {

            if (status.getPlace() != null) {

                //               Query q = NumericRangeQuery.newLongRange(TweetStreamReader.StatusField.ID.name, status.getId(),
                //                     status.getId(), true, true);
                //               System.out.print("Deleting docCount="+writer.numDocs());
                //               writer.deleteDocuments(q);
                //               writer.commit();
                //               System.out.print(" Deleted docCount="+writer.numDocs());

                Document doc = new Document();
                doc.add(new LongField(StatusField.ID.name, status.getId(), Field.Store.YES));
                doc.add(new LongField(StatusField.EPOCH.name, status.getEpoch(), Field.Store.YES));
                doc.add(new TextField(StatusField.SCREEN_NAME.name, status.getScreenname(), Store.YES));

                doc.add(new Field(StatusField.TEXT.name, status.getText(), textOptions));

                doc.add(new IntField(StatusField.FRIENDS_COUNT.name, status.getFollowersCount(), Store.YES));
                doc.add(new IntField(StatusField.FOLLOWERS_COUNT.name, status.getFriendsCount(), Store.YES));
                doc.add(new IntField(StatusField.STATUSES_COUNT.name, status.getStatusesCount(), Store.YES));
                doc.add(new DoubleField(StatusField.LONGITUDE.name, status.getLongitude(), Store.YES));
                doc.add(new DoubleField(StatusField.LATITUDE.name, status.getlatitude(), Store.YES));
                doc.add(new StringField(StatusField.PLACE.name, status.getPlace(), Store.YES));
                long inReplyToStatusId = status.getInReplyToStatusId();
                if (inReplyToStatusId > 0) {
                    doc.add(new LongField(StatusField.IN_REPLY_TO_STATUS_ID.name, inReplyToStatusId,
                            Field.Store.YES));
                    doc.add(new LongField(StatusField.IN_REPLY_TO_USER_ID.name, status.getInReplyToUserId(),
                            Field.Store.YES));
                }

                String lang = status.getLang();
                if (!lang.equals("unknown")) {
                    doc.add(new TextField(StatusField.LANG.name, status.getLang(), Store.YES));
                }

                long retweetStatusId = status.getRetweetedStatusId();
                if (retweetStatusId > 0) {
                    doc.add(new LongField(StatusField.RETWEETED_STATUS_ID.name, retweetStatusId,
                            Field.Store.YES));
                    doc.add(new LongField(StatusField.RETWEETED_USER_ID.name, status.getRetweetedUserId(),
                            Field.Store.YES));
                    doc.add(new IntField(StatusField.RETWEET_COUNT.name, status.getRetweetCount(), Store.YES));
                    if (status.getRetweetCount() < 0 || status.getRetweetedStatusId() < 0) {
                        LOG.warn("Error parsing retweet fields of " + status.getId());
                    }
                }

                long id = status.getId();
                BytesRefBuilder brb = new BytesRefBuilder();
                NumericUtils.longToPrefixCodedBytes(id, 0, brb);
                Term term = new Term(StatusField.ID.name, brb.get());
                writer.updateDocument(term, doc);

                //               writer.addDocument(doc);

                updateCount += 1;

                if (updateCount % 10000 == 0) {

                    LOG.info(updateCount + " statuses updated");
                    writer.commit();
                    System.out.println("Updated docCount=" + writer.numDocs());
                }

            }

        }

        LOG.info("Total elapsed time: " + (System.currentTimeMillis() - startTime) + "ms");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        writer.close();
        dir.close();
        stream.close();
    }
}