List of usage examples for java.util.Arrays.toString
public static String toString(Object[] a)
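This overload renders the array as a bracketed, comma-separated list, converting each element with String.valueOf, and returns the string "null" when the array itself is null. A minimal standalone sketch:

import java.util.Arrays;

public class ArraysToStringDemo {
    public static void main(String[] args) {
        Object[] values = { "alpha", 42, null };
        System.out.println(Arrays.toString(values));          // [alpha, 42, null]
        System.out.println(Arrays.toString((Object[]) null)); // null
    }
}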
From source file:deck36.storm.plan9.nodejs.ExtendedKittenRobbersTopology.java
public static void main(String[] args) throws Exception {
    String env = null;
    if (args != null && args.length > 0) {
        env = args[0];
    }
    if (!"dev".equals(env) && !"prod".equals(env)) {
        System.out.println("Usage: $0 (dev|prod)\n");
        System.exit(1);
    }

    // Topology config
    Config conf = new Config();

    // Load parameters and add them to the Config
    Map configMap = YamlLoader.loadYamlFromResource("storm_" + env + ".yml");
    conf.putAll(configMap);

    log.info(JSONValue.toJSONString(conf));

    // Set topology loglevel to DEBUG
    conf.put(Config.TOPOLOGY_DEBUG, JsonPath.read(conf, "$.deck36_storm.debug"));

    // Create Topology builder
    TopologyBuilder builder = new TopologyBuilder();

    // If there are no special reasons, start with a parallelism hint of 1
    // and multiple tasks. That way you can scale dynamically later on.
    int parallelism_hint = JsonPath.read(conf, "$.deck36_storm.default_parallelism_hint");
    int num_tasks = JsonPath.read(conf, "$.deck36_storm.default_num_tasks");

    String badgeName = ExtendedKittenRobbersTopology.class.getSimpleName();

    // Create a stream from RabbitMQ messages:
    // bind a new queue named after the topology to the main plan9 exchange
    // (from properties config), consuming only CBT-related events by using
    // the routing key 'cbt.#'
    String rabbitQueueName = badgeName; // use topology class name as name for the queue
    String rabbitExchangeName = JsonPath.read(conf, "$.deck36_storm.ExtendedKittenRobbersBolt.rabbitmq.exchange");
    String rabbitRoutingKey = JsonPath.read(conf, "$.deck36_storm.ExtendedKittenRobbersBolt.rabbitmq.routing_key");

    // Get JSON deserialization scheme
    Scheme rabbitScheme = new SimpleJSONScheme();

    // Set up a Declarator to configure exchange/queue/routing key
    RabbitMQDeclarator rabbitDeclarator = new RabbitMQDeclarator(rabbitExchangeName, rabbitQueueName,
            rabbitRoutingKey);

    // Create configuration for the spout
    ConnectionConfig connectionConfig = new ConnectionConfig(
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.host"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.port"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.user"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.pass"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.vhost"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.heartbeat"));

    ConsumerConfig spoutConfig = new ConsumerConfigBuilder().connection(connectionConfig)
            .queue(rabbitQueueName)
            .prefetch((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"))
            .requeueOnFail()
            .build();

    // Add global parameters to topology config - the RabbitMQSpout will read them from there
    conf.putAll(spoutConfig.asMap());

    // For production, set the spout pending value to the same value as the RabbitMQ prefetch
    // see: https://github.com/ppat/storm-rabbitmq/blob/master/README.md
    if ("prod".equals(env)) {
        conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING,
                (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"));
    }

    // Add RabbitMQ spout to topology
    builder.setSpout("incoming", new RabbitMQSpout(rabbitScheme, rabbitDeclarator), parallelism_hint)
            .setNumTasks((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.spout_tasks"));

    // Construct command to invoke the external bolt implementation
    ArrayList<String> command = new ArrayList<>(15);

    // Add main execution program (php, hhvm, zend, ..) and parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.nodejs.executor"));

    // Add main route to be invoked and its parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.ExtendedKittenRobbersBolt.main"));
    List<String> boltParams = (List<String>) JsonPath.read(conf, "$.deck36_storm.ExtendedKittenRobbersBolt.params");
    if (boltParams != null)
        command.addAll(boltParams);

    // Log the final command
    log.info("Command to start bolt for Extended Kitten Robbers From Outer Space: "
            + Arrays.toString(command.toArray()));

    // We need to use the tick tuple adapter instead of the general adapter:
    // add the constructed external bolt command to the topology using MultilangAdapterTickTupleBolt
    builder.setBolt("badge",
            new MultilangAdapterTickTupleBolt(command,
                    (Integer) JsonPath.read(conf, "$.deck36_storm.ExtendedKittenRobbersBolt.robber_frequency"),
                    "badge"),
            parallelism_hint)
            .setNumTasks(num_tasks)
            .shuffleGrouping("incoming");

    builder.setBolt("rabbitmq_router",
            new Plan9RabbitMQRouterBolt(
                    (String) JsonPath.read(conf, "$.deck36_storm.ExtendedKittenRobbersBolt.rabbitmq.target_exchange"),
                    "KittenRobbers" // RabbitMQ routing key
            ), parallelism_hint)
            .setNumTasks(num_tasks)
            .shuffleGrouping("badge");

    builder.setBolt("rabbitmq_producer", new Plan9RabbitMQPushBolt(), parallelism_hint)
            .setNumTasks(num_tasks)
            .shuffleGrouping("rabbitmq_router");

    if ("dev".equals(env)) {
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(badgeName + System.currentTimeMillis(), conf, builder.createTopology());
        Thread.sleep(2000000);
    }

    if ("prod".equals(env)) {
        StormSubmitter.submitTopology(badgeName + "-" + System.currentTimeMillis(), conf,
                builder.createTopology());
    }
}
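Both Storm topologies in this list log the external bolt command the same way: build the command as a List<String>, then render it with Arrays.toString(command.toArray()). A minimal standalone sketch of just that step (the command values here are hypothetical):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class CommandLogDemo {
    public static void main(String[] args) {
        List<String> command = new ArrayList<>();
        command.add("node");          // executor (hypothetical value)
        command.add("bolt/main.js");  // entry point (hypothetical value)
        command.add("--env=dev");     // extra parameter (hypothetical value)
        // toArray() produces an Object[], which Arrays.toString renders on one readable line
        System.out.println("Command to start bolt: " + Arrays.toString(command.toArray()));
        // prints: Command to start bolt: [node, bolt/main.js, --env=dev]
    }
}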
From source file:deck36.storm.plan9.php.DeludedKittenRobbersTopology.java
public static void main(String[] args) throws Exception {
    String env = null;
    if (args != null && args.length > 0) {
        env = args[0];
    }
    if (!"dev".equals(env) && !"prod".equals(env)) {
        System.out.println("Usage: $0 (dev|prod)\n");
        System.exit(1);
    }

    // Topology config
    Config conf = new Config();

    // Load parameters and add them to the Config
    Map configMap = YamlLoader.loadYamlFromResource("config_" + env + ".yml");
    conf.putAll(configMap);

    log.info(JSONValue.toJSONString(conf));

    // Set topology loglevel to DEBUG
    conf.put(Config.TOPOLOGY_DEBUG, JsonPath.read(conf, "$.deck36_storm.debug"));

    // Create Topology builder
    TopologyBuilder builder = new TopologyBuilder();

    // If there are no special reasons, start with a parallelism hint of 1
    // and multiple tasks. That way you can scale dynamically later on.
    int parallelism_hint = JsonPath.read(conf, "$.deck36_storm.default_parallelism_hint");
    int num_tasks = JsonPath.read(conf, "$.deck36_storm.default_num_tasks");

    // Create a stream from RabbitMQ messages:
    // bind a new queue named after the topology to the main plan9 exchange
    // (from properties config), consuming only CBT-related events by using
    // the routing key 'cbt.#'
    String badgeName = DeludedKittenRobbersTopology.class.getSimpleName();
    String rabbitQueueName = badgeName; // use topology class name as name for the queue
    String rabbitExchangeName = JsonPath.read(conf, "$.deck36_storm.DeludedKittenRobbersBolt.rabbitmq.exchange");
    String rabbitRoutingKey = JsonPath.read(conf, "$.deck36_storm.DeludedKittenRobbersBolt.rabbitmq.routing_key");

    // Get JSON deserialization scheme
    Scheme rabbitScheme = new SimpleJSONScheme();

    // Set up a Declarator to configure exchange/queue/routing key
    RabbitMQDeclarator rabbitDeclarator = new RabbitMQDeclarator(rabbitExchangeName, rabbitQueueName,
            rabbitRoutingKey);

    // Create configuration for the spout
    ConnectionConfig connectionConfig = new ConnectionConfig(
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.host"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.port"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.user"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.pass"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.vhost"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.heartbeat"));

    ConsumerConfig spoutConfig = new ConsumerConfigBuilder().connection(connectionConfig)
            .queue(rabbitQueueName)
            .prefetch((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"))
            .requeueOnFail()
            .build();

    // Add global parameters to topology config - the RabbitMQSpout will read them from there
    conf.putAll(spoutConfig.asMap());

    // For production, set the spout pending value to the same value as the RabbitMQ prefetch
    // see: https://github.com/ppat/storm-rabbitmq/blob/master/README.md
    if ("prod".equals(env)) {
        conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING,
                (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"));
    }

    // Add RabbitMQ spout to topology
    builder.setSpout("incoming", new RabbitMQSpout(rabbitScheme, rabbitDeclarator), parallelism_hint)
            .setNumTasks((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.spout_tasks"));

    // Construct command to invoke the external bolt implementation
    ArrayList<String> command = new ArrayList<>(15);

    // Add main execution program (php, hhvm, zend, ..) and parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.php.executor"));
    command.addAll((List<String>) JsonPath.read(conf, "$.deck36_storm.php.executor_params"));

    // Add main command to be executed (app/console, the phar file, etc.)
    // and global context parameters (environment etc.)
    command.add((String) JsonPath.read(conf, "$.deck36_storm.php.main"));
    command.addAll((List<String>) JsonPath.read(conf, "$.deck36_storm.php.main_params"));

    // Add main route to be invoked and its parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.DeludedKittenRobbersBolt.main"));
    List<String> boltParams = (List<String>) JsonPath.read(conf, "$.deck36_storm.DeludedKittenRobbersBolt.params");
    if (boltParams != null)
        command.addAll(boltParams);

    // Log the final command
    log.info("Command to start bolt for Deluded Kitten Robbers: " + Arrays.toString(command.toArray()));

    // Add constructed external bolt command to topology using MultilangAdapterTickTupleBolt
    builder.setBolt("badge",
            new MultilangAdapterTickTupleBolt(command,
                    (Integer) JsonPath.read(conf, "$.deck36_storm.DeludedKittenRobbersBolt.attack_frequency_secs"),
                    "badge"),
            parallelism_hint)
            .setNumTasks(num_tasks)
            .shuffleGrouping("incoming");

    builder.setBolt("rabbitmq_router",
            new Plan9RabbitMQRouterBolt(
                    (String) JsonPath.read(conf, "$.deck36_storm.DeludedKittenRobbersBolt.rabbitmq.target_exchange"),
                    "DeludedKittenRobbers" // RabbitMQ routing key
            ), parallelism_hint)
            .setNumTasks(num_tasks)
            .shuffleGrouping("badge");

    builder.setBolt("rabbitmq_producer", new Plan9RabbitMQPushBolt(), parallelism_hint)
            .setNumTasks(num_tasks)
            .shuffleGrouping("rabbitmq_router");

    if ("dev".equals(env)) {
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(badgeName + System.currentTimeMillis(), conf, builder.createTopology());
        Thread.sleep(2000000);
    }

    if ("prod".equals(env)) {
        StormSubmitter.submitTopology(badgeName + "-" + System.currentTimeMillis(), conf,
                builder.createTopology());
    }
}
From source file:PartitionLR.java
public static void main(String[] args) {
    if (args.length < 6) {
        System.err.println("Usage: JavaHdfsLR <master> <file> <iters> <L> <D> <use FPGA?> (<testing file>)");
        System.exit(1);
    }

    int ITERATIONS = Integer.parseInt(args[2]);
    System.out.println("iterations: " + ITERATIONS);
    L = Integer.parseInt(args[3]);
    System.out.println("L: " + L);
    D = Integer.parseInt(args[4]);
    System.out.println("D: " + D);
    useFPGA = Integer.parseInt(args[5]);
    System.out.println("use FPGA: " + useFPGA);

    JavaSparkContext sc = new JavaSparkContext(args[0], "PartitionLR", System.getenv("SPARK_HOME"),
            "target/simple-project-1.0.jar");
    JavaRDD<String> lines = sc.textFile(args[1]);
    JavaRDD<DataPoint> points = lines.map(new ParsePoint()).repartition(32).cache();

    float[][] w = new float[L][D];
    for (int i = 0; i < L; i++) {
        for (int j = 0; j < D; j++) {
            w[i][j] = 0.0f;
        }
    }

    System.out.print("Initial w: ");
    printWeights(w);

    for (int k = 1; k <= ITERATIONS; k++) {
        System.out.println("On iteration " + k);
        long tic = System.nanoTime();
        float[][] gradient = points.mapPartitions(new BackwardLR(w)).reduce(new VectorSum());
        System.out.println("elapsed time: " + (System.nanoTime() - tic) / 1e9);
        for (int i = 0; i < L; i++) {
            for (int j = 0; j < D; j++) {
                w[i][j] -= gradient[i][j];
            }
        }
    }

    System.out.print("Final w: ");
    printWeights(w);

    lines = sc.textFile(args.length < 7 ? args[1] : args[6]);
    System.out.println("first prediction");
    System.out.println(Arrays
            .toString(lines.map(new ParsePoint()).repartition(32).mapPartitions(new ForwardLR(w)).first()));
    System.exit(0);
}
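A detail worth noting in this example: the 2D weight matrix w goes through a custom printWeights helper rather than Arrays.toString, because on a nested array Arrays.toString falls back to the inner arrays' default toString (type-and-hashcode strings). Arrays.deepToString handles the nested case. A minimal sketch:

import java.util.Arrays;

public class NestedArrayDemo {
    public static void main(String[] args) {
        float[][] w = { { 0.0f, 1.5f }, { 2.5f, 3.0f } };
        // The inner float[] elements have no useful toString of their own:
        System.out.println(Arrays.toString(w));     // e.g. [[F@1b6d3586, [F@4554617c]
        // deepToString recurses into nested arrays:
        System.out.println(Arrays.deepToString(w)); // [[0.0, 1.5], [2.5, 3.0]]
    }
}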
From source file:eu.fbk.utils.twm.FormPageSearcher.java
public static void main(final String args[]) throws Exception {
    String logConfig = System.getProperty("log-config");
    if (logConfig == null) {
        logConfig = "configuration/log-config.txt";
    }
    PropertyConfigurator.configure(logConfig);

    final Options options = new Options();
    try {
        OptionBuilder.withArgName("index");
        OptionBuilder.hasArg();
        OptionBuilder.withDescription("open an index with the specified name");
        OptionBuilder.isRequired();
        OptionBuilder.withLongOpt("index");
        final Option indexNameOpt = OptionBuilder.create("i");

        OptionBuilder.withArgName("interactive-mode");
        OptionBuilder.withDescription("enter in the interactive mode");
        OptionBuilder.withLongOpt("interactive-mode");
        final Option interactiveModeOpt = OptionBuilder.create("t");

        OptionBuilder.withArgName("search");
        OptionBuilder.hasArg();
        OptionBuilder.withDescription("search for the specified key");
        OptionBuilder.withLongOpt("search");
        final Option searchOpt = OptionBuilder.create("s");

        OptionBuilder.withArgName("key-freq");
        OptionBuilder.hasArg();
        OptionBuilder.withDescription("read the keys' frequencies from the specified file");
        OptionBuilder.withLongOpt("key-freq");
        final Option freqFileOpt = OptionBuilder.create("f");

        OptionBuilder.withArgName("minimum-freq");
        // Option keyFieldNameOpt = OptionBuilder.withArgName("key-field-name").hasArg()
        //         .withDescription("use the specified name for the field key")
        //         .withLongOpt("key-field-name").create("k");
        // Option valueFieldNameOpt = OptionBuilder.withArgName("value-field-name").hasArg()
        //         .withDescription("use the specified name for the field value")
        //         .withLongOpt("value-field-name").create("v");
        final Option minimumKeyFreqOpt = OptionBuilder.hasArg()
                .withDescription("minimum key frequency of cached values (default is " + DEFAULT_MIN_FREQ + ")")
                .withLongOpt("minimum-freq").create("m");

        OptionBuilder.withArgName("int");
        final Option notificationPointOpt = OptionBuilder.hasArg()
                .withDescription(
                        "receive notification every n pages (default is " + DEFAULT_NOTIFICATION_POINT + ")")
                .withLongOpt("notification-point").create("b");

        options.addOption("h", "help", false, "print this message");
        options.addOption("v", "version", false, "output version information and exit");
        options.addOption(indexNameOpt);
        options.addOption(interactiveModeOpt);
        options.addOption(searchOpt);
        options.addOption(freqFileOpt);
        // options.addOption(keyFieldNameOpt);
        // options.addOption(valueFieldNameOpt);
        options.addOption(minimumKeyFreqOpt);
        options.addOption(notificationPointOpt);

        final CommandLineParser parser = new PosixParser();
        final CommandLine line = parser.parse(options, args);

        if (line.hasOption("help") || line.hasOption("version")) {
            throw new ParseException("");
        }

        int minFreq = DEFAULT_MIN_FREQ;
        if (line.hasOption("minimum-freq")) {
            minFreq = Integer.parseInt(line.getOptionValue("minimum-freq"));
        }

        int notificationPoint = DEFAULT_NOTIFICATION_POINT;
        if (line.hasOption("notification-point")) {
            notificationPoint = Integer.parseInt(line.getOptionValue("notification-point"));
        }

        final FormPageSearcher pageFormSearcher = new FormPageSearcher(line.getOptionValue("index"));
        pageFormSearcher.setNotificationPoint(notificationPoint);
        /*
         * logger.debug(line.getOptionValue("key-field-name") + "\t" + line.getOptionValue("value-field-name"));
         * if (line.hasOption("key-field-name")) {
         *     pageFormSearcher.setKeyFieldName(line.getOptionValue("key-field-name"));
         * }
         * if (line.hasOption("value-field-name")) {
         *     pageFormSearcher.setValueFieldName(line.getOptionValue("value-field-name"));
         * }
         */
        if (line.hasOption("key-freq")) {
            pageFormSearcher.loadCache(line.getOptionValue("key-freq"), minFreq);
        }
        if (line.hasOption("search")) {
            logger.debug("searching " + line.getOptionValue("search") + "...");
            final FreqSetSearcher.Entry[] result = pageFormSearcher.search(line.getOptionValue("search"));
            logger.info(Arrays.toString(result));
        }
        if (line.hasOption("interactive-mode")) {
            pageFormSearcher.interactive();
        }
    } catch (final ParseException e) {
        // oops, something went wrong
        if (e.getMessage().length() > 0) {
            System.out.println("Parsing failed: " + e.getMessage() + "\n");
        }
        final HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(400,
                "java -cp dist/thewikimachine.jar org.fbk.cit.hlt.thewikimachine.index.FormPageSearcher",
                "\n", options, "\n", true);
    }
}
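Here Arrays.toString renders a whole Entry[] search result in one log call, relying on each element's toString; it also returns the string "null" for a null array, so no null check is needed before logging. A minimal sketch (this Entry class is a hypothetical stand-in for FreqSetSearcher.Entry):

import java.util.Arrays;

public class ResultLogDemo {
    // Hypothetical stand-in for FreqSetSearcher.Entry
    static final class Entry {
        final String key;
        final int freq;
        Entry(String key, int freq) { this.key = key; this.freq = freq; }
        @Override public String toString() { return key + "=" + freq; }
    }

    public static void main(String[] args) {
        Entry[] result = { new Entry("rome", 42), new Entry("paris", 7) };
        System.out.println(Arrays.toString(result)); // [rome=42, paris=7]
        result = null;
        System.out.println(Arrays.toString(result)); // null (no NullPointerException)
    }
}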
From source file:fr.inria.atlanmod.atl_mr.utils.NeoEMFHBaseMigrator.java
public static void main(String[] args) {
    Options options = new Options();

    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input file, both of xmi and zxmi extensions are supported");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);

    Option outputOpt = OptionBuilder.create(OUT);
    outputOpt.setArgName("OUTPUT");
    outputOpt.setDescription("Output HBase resource URI");
    outputOpt.setArgs(1);
    outputOpt.setRequired(true);

    Option inClassOpt = OptionBuilder.create(E_PACKAGE);
    inClassOpt.setArgName("METAMODEL");
    inClassOpt.setDescription("URI of the ecore Metamodel");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);

    options.addOption(inputOpt);
    options.addOption(outputOpt);
    options.addOption(inClassOpt);

    CommandLineParser parser = new PosixParser();

    try {
        CommandLine commandLine = parser.parse(options, args);

        URI sourceUri = URI.createFileURI(commandLine.getOptionValue(IN));
        URI targetUri = URI.createURI(commandLine.getOptionValue(OUT));
        URI metamodelUri = URI.createFileURI(commandLine.getOptionValue(E_PACKAGE));

        NeoEMFHBaseMigrator.class.getClassLoader().loadClass(commandLine.getOptionValue(E_PACKAGE))
                .getMethod("init").invoke(null);
        // org.eclipse.gmt.modisco.java.kyanos.impl.JavaPackageImpl.init();

        ResourceSet resourceSet = new ResourceSetImpl();
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put("ecore",
                new EcoreResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put("xmi",
                new XMIResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put("zxmi",
                new XMIResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap().put(KyanosURI.KYANOS_HBASE_SCHEME,
                KyanosResourceFactory.eINSTANCE);

        // Registering the metamodel
        // Resource MMResource = resourceSet.createResource(metamodelUri);
        // MMResource.load(Collections.EMPTY_MAP);
        // ATLMRUtils.registerPackages(resourceSet, MMResource);

        // Loading the XMI resource
        Resource sourceResource = resourceSet.createResource(sourceUri);
        Map<String, Object> loadOpts = new HashMap<String, Object>();
        if ("zxmi".equals(sourceUri.fileExtension())) {
            loadOpts.put(XMIResource.OPTION_ZIP, Boolean.TRUE);
        }

        Runtime.getRuntime().gc();
        long initialUsedMemory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
        LOG.log(Level.INFO, MessageFormat.format("Used memory before loading: {0}",
                ATLMRUtils.byteCountToDisplaySize(initialUsedMemory)));
        LOG.log(Level.INFO, "Loading source resource");
        sourceResource.load(loadOpts);
        LOG.log(Level.INFO, "Source resource loaded");
        Runtime.getRuntime().gc();
        long finalUsedMemory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
        LOG.log(Level.INFO, MessageFormat.format("Used memory after loading: {0}",
                ATLMRUtils.byteCountToDisplaySize(finalUsedMemory)));
        LOG.log(Level.INFO, MessageFormat.format("Memory use increase: {0}",
                ATLMRUtils.byteCountToDisplaySize(finalUsedMemory - initialUsedMemory)));

        Resource targetResource = resourceSet.createResource(targetUri);
        Map<String, Object> saveOpts = new HashMap<String, Object>();
        targetResource.save(saveOpts);

        LOG.log(Level.INFO, "Start moving elements");
        targetResource.getContents().clear();
        targetResource.getContents().addAll(sourceResource.getContents());
        LOG.log(Level.INFO, "End moving elements");
        LOG.log(Level.INFO, "Start saving");
        targetResource.save(saveOpts);
        LOG.log(Level.INFO, "Saved");

        if (targetResource instanceof KyanosHbaseResourceImpl) {
            KyanosHbaseResourceImpl.shutdownWithoutUnload((KyanosHbaseResourceImpl) targetResource);
        } else {
            targetResource.unload();
        }
    } catch (ParseException e) {
        ATLMRUtils.showError(e.toString());
        ATLMRUtils.showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        ATLMRUtils.showError(e.toString());
        e.printStackTrace();
    }
}
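The ParseException handler above shows a common diagnostic use of Arrays.toString: echoing the exact argument vector back to the user when parsing fails. A minimal standalone sketch of the pattern:

import java.util.Arrays;

public class ArgsEchoDemo {
    public static void main(String[] args) {
        try {
            if (args.length < 2) {
                throw new IllegalArgumentException("expected at least 2 arguments");
            }
            // ... real work would go here ...
        } catch (IllegalArgumentException e) {
            System.err.println(e.getMessage());
            // Show exactly what the program received, in one readable line
            System.err.println("Current arguments: " + Arrays.toString(args));
        }
    }
}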
From source file:kishida.cnn.NeuralNetwork.java
public static void main(String[] args) throws IOException {
    NeuralNetwork nn = new NeuralNetwork();
    nn.getLayers()
            .addAll(Arrays.asList(new InputLayer(20, 20), new ConvolutionLayer("conv1", 3, 7, 2, 1, true),
                    new MaxPoolingLayer("pool", 3, 2), new MultiNormalizeLayer("norm1", 5, .0001f, true),
                    new FullyConnect("test", 3, 0, 1, new LogisticFunction(), true)));
    nn.init();
    nn.random.nextInt();

    StringWriter sw = new StringWriter();
    nn.writeAsJson(sw);
    System.out.println(sw);

    // Round-trip check: deserialize the network from the JSON that was just serialized
    StringReader sr0 = new StringReader(sw.toString());
    NeuralNetwork nn0 = nn.readFromJson(sr0);
    nn0.init();

    ConvolutionLayer conv1o = (ConvolutionLayer) nn.findLayerByName("conv1").get();
    ConvolutionLayer conv1r = (ConvolutionLayer) nn0.findLayerByName("conv1").get();
    System.out.println("org:" + Arrays.toString(conv1o.getFilter()));
    System.out.println("red:" + Arrays.toString(conv1r.getFilter()));
    double loss = IntStream.range(0, conv1o.getFilter().length)
            .mapToDouble(i -> (conv1o.getFilter()[i] - conv1r.getFilter()[i])
                    * (conv1o.getFilter()[i] - conv1r.getFilter()[i]))
            .sum();
    System.out.println(Math.sqrt(loss));

    NeuralNetwork v = NeuralNetwork.readFromJson(new StringReader("{\n"
            + " \"weightDecay\" : 5.0E-4,\n"
            + " \"miniBatch\" : 128,\n"
            + " \"random\" : \"c3EAfgAAAT/wWGBKFyCXAAATnQ6sF654\",\n"
            + " \"imageRandom\" : \"c3EAfgAAAAAAAAAAAAAAAAAABd7s70R4\",\n"
            + " \"momentam\" : 0.9,\n"
            + " \"layers\" : [ {\n"
            + " \"InputLayer\" : {\n"
            + " \"width\" : 250,\n"
            + " \"height\" : 220,\n"
            + " \"name\" : \"input\"\n"
            + " }\n"
            + " }, {\n"
            + " \"ConvolutionLayer\" : {\n"
            + " \"name\" : \"conv1\",\n"
            + " \"filter\" : null,\n"
            + " \"bias\" : [ 1.0, 1.0, 1.0 ],\n"
            + " \"filterDelta\" : null,\n"
            + " \"biasDelta\" : [ 0.0, 0.0, 0.0 ],\n"
            + " \"stride\" : 2,\n"
            + " \"filterSize\" : 7,\n"
            + " \"useGpu\" : true\n"
            + " }\n"
            + " }, {\n"
            + " \"MaxPoolingLayer\" : {\n"
            + " \"name\" : \"pool\",\n"
            + " \"size\" : 3,\n"
            + " \"stride\" : 2\n"
            + " }\n"
            + " }, {\n"
            + " \"MultiNormalizeLayer\" : {\n"
            + " \"name\" : \"norm1\",\n"
            + " \"size\" : 5,\n"
            + " \"threshold\" : 1.0E-4,\n"
            + " \"useGpu\" : true\n"
            + " }\n"
            + " }, {\n"
            + " \"FullyConnect\" : {\n"
            + " \"name\" : \"test\",\n"
            + " \"outputSize\" : 3,\n"
            + " \"weight\" : [ 0.0014115907, 0.0043465886, 0.01138472, -0.0013297468, "
            + "-0.0060525155, -0.0109255025, -0.015493984, 0.011872963, -0.0015145391 ],\n"
            + " \"initBias\" : 0.5, "
            + " \"bias\" : [ 0.0, 0.2, 0.4 ],\n"
            + " \"weightDelta\" : [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n"
            + " \"biasDelta\" : [ 0.0, 0.0, 0.0 ],\n"
            + " \"dropoutRate\" : 1.0,\n"
            + " \"activation\" : \"LogisticFunction\",\n"
            + " \"useGpu\" : true\n"
            + " }\n"
            + " } ],\n"
            + " \"learningRate\" : 0.01\n"
            + "}"));
    System.out.println(nn.random.nextInt());
    System.out.println(v.random.nextInt());
    v.findLayerByName("test").ifPresent(layer -> {
        FullyConnect f = (FullyConnect) layer;
        System.out.println(f.getActivation().getClass());
        System.out.println(Arrays.toString(f.getBias()));
    });
    v.init();
    v.findLayerByName("test").ifPresent(layer -> {
        FullyConnect f = (FullyConnect) layer;
        System.out.println(f.getActivation().getClass());
        System.out.println(Arrays.toString(f.getBias()));
    });
}
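Printing the original and restored filter arrays with Arrays.toString makes an eyeball comparison easy; for an exact programmatic check of such a round trip, Arrays.equals compares contents element by element. A minimal sketch:

import java.util.Arrays;

public class RoundTripCompareDemo {
    public static void main(String[] args) {
        float[] original = { 0.1f, 0.2f, 0.3f };
        float[] restored = { 0.1f, 0.2f, 0.3f };
        // Side-by-side rendering for a visual diff:
        System.out.println("org:" + Arrays.toString(original));
        System.out.println("red:" + Arrays.toString(restored));
        // Exact element-by-element content check (== would only compare references):
        System.out.println(Arrays.equals(original, restored)); // true
    }
}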
From source file:hu.bme.mit.sette.run.Run.java
public static void main(String[] args) {
    LOG.debug("main() called");

    // parse properties
    Properties prop = new Properties();
    InputStream is = null;
    try {
        is = new FileInputStream(SETTE_PROPERTIES);
        prop.load(is);
    } catch (IOException e) {
        System.err.println("Parsing " + SETTE_PROPERTIES + " has failed");
        e.printStackTrace();
        System.exit(1);
    } finally {
        IOUtils.closeQuietly(is);
    }

    String[] basedirs = StringUtils.split(prop.getProperty("basedir"), '|');
    String snippetDir = prop.getProperty("snippet-dir");
    String snippetProject = prop.getProperty("snippet-project");
    String catgPath = prop.getProperty("catg");
    String catgVersionFile = prop.getProperty("catg-version-file");
    String jPETPath = prop.getProperty("jpet");
    String jPETDefaultBuildXml = prop.getProperty("jpet-default-build.xml");
    String jPETVersionFile = prop.getProperty("jpet-version-file");
    String spfPath = prop.getProperty("spf");
    String spfDefaultBuildXml = prop.getProperty("spf-default-build.xml");
    String spfVersionFile = prop.getProperty("spf-version-file");
    String outputDir = prop.getProperty("output-dir");

    Validate.notEmpty(basedirs, "At least one basedir must be specified in " + SETTE_PROPERTIES);
    Validate.notBlank(snippetDir, "The property snippet-dir must be set in " + SETTE_PROPERTIES);
    Validate.notBlank(snippetProject, "The property snippet-project must be set in " + SETTE_PROPERTIES);
    Validate.notBlank(catgPath, "The property catg must be set in " + SETTE_PROPERTIES);
    Validate.notBlank(jPETPath, "The property jpet must be set in " + SETTE_PROPERTIES);
    Validate.notBlank(spfPath, "The property spf must be set in " + SETTE_PROPERTIES);
    Validate.notBlank(outputDir, "The property output-dir must be set in " + SETTE_PROPERTIES);

    String basedir = null;
    for (String bd : basedirs) {
        bd = StringUtils.trimToEmpty(bd);
        if (bd.startsWith("~")) {
            // Linux home
            bd = System.getProperty("user.home") + bd.substring(1);
        }
        FileValidator v = new FileValidator(new File(bd));
        v.type(FileType.DIRECTORY);
        if (v.isValid()) {
            basedir = bd;
            break;
        }
    }

    if (basedir == null) {
        System.err.println("basedir = " + Arrays.toString(basedirs));
        System.err.println("ERROR: No valid basedir was found, please check " + SETTE_PROPERTIES);
        System.exit(2);
    }

    BASEDIR = new File(basedir);
    SNIPPET_DIR = new File(basedir, snippetDir);
    SNIPPET_PROJECT = snippetProject;
    OUTPUT_DIR = new File(basedir, outputDir);

    try {
        String catgVersion = readToolVersion(new File(BASEDIR, catgVersionFile));
        if (catgVersion != null) {
            new CatgTool(new File(BASEDIR, catgPath), catgVersion);
        }

        String jPetVersion = readToolVersion(new File(BASEDIR, jPETVersionFile));
        if (jPetVersion != null) {
            new JPetTool(new File(BASEDIR, jPETPath), new File(BASEDIR, jPETDefaultBuildXml), jPetVersion);
        }

        String spfVersion = readToolVersion(new File(BASEDIR, spfVersionFile));
        if (spfVersion != null) {
            new SpfTool(new File(BASEDIR, spfPath), new File(BASEDIR, spfDefaultBuildXml), spfVersion);
        }

        // TODO stuff
        stuff(args);
    } catch (Exception e) {
        System.err.println(ExceptionUtils.getStackTrace(e));
        // Guard the cast: only ValidatorException carries the collected validation errors
        if (e instanceof ValidatorException) {
            ValidatorException vex = (ValidatorException) e;
            for (ValidationException ve : vex.getValidator().getAllExceptions()) {
                ve.printStackTrace();
            }
            System.err.println("Details:");
            System.err.println(vex.getFullMessage());
        } else if (e.getCause() instanceof ValidatorException) {
            System.err.println("Details:");
            System.err.println(((ValidatorException) e.getCause()).getFullMessage());
        }
    }
}
From source file:eqtlmappingpipeline.util.ModuleEqtlGeuvadisReplication.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, LdCalculatorException {
    System.out.println(HEADER);
    System.out.println();
    System.out.flush(); // flush to make sure header is printed before any errors
    try {
        Thread.sleep(25); // allows the flush to complete
    } catch (InterruptedException ex) {
    }

    CommandLineParser parser = new PosixParser();
    final CommandLine commandLine;
    try {
        commandLine = parser.parse(OPTIONS, args, true);
    } catch (ParseException ex) {
        System.err.println("Invalid command line arguments: " + ex.getMessage());
        System.err.println();
        new HelpFormatter().printHelp(" ", OPTIONS);
        System.exit(1);
        return;
    }

    final String[] genotypesBasePaths = commandLine.getOptionValues("g");
    final RandomAccessGenotypeDataReaderFormats genotypeDataType;
    final String replicationQtlFilePath = commandLine.getOptionValue("e");
    final String interactionQtlFilePath = commandLine.getOptionValue("i");
    final String outputFilePath = commandLine.getOptionValue("o");
    final double ldCutoff = Double.parseDouble(commandLine.getOptionValue("ld"));
    final int window = Integer.parseInt(commandLine.getOptionValue("w"));

    System.out.println("Genotype: " + Arrays.toString(genotypesBasePaths));
    System.out.println("Interaction file: " + interactionQtlFilePath);
    System.out.println("Replication file: " + replicationQtlFilePath);
    System.out.println("Output: " + outputFilePath);
    System.out.println("LD: " + ldCutoff);
    System.out.println("Window: " + window);

    try {
        if (commandLine.hasOption("G")) {
            genotypeDataType = RandomAccessGenotypeDataReaderFormats
                    .valueOf(commandLine.getOptionValue("G").toUpperCase());
        } else {
            if (genotypesBasePaths[0].endsWith(".vcf")) {
                System.err.println(
                        "Only vcf.gz is supported. Please see manual on how to create a vcf.gz file.");
                System.exit(1);
                return;
            }
            try {
                genotypeDataType = RandomAccessGenotypeDataReaderFormats
                        .matchFormatToPath(genotypesBasePaths[0]);
            } catch (GenotypeDataException e) {
                System.err.println("Unable to determine input 1 type based on specified path. Please specify -G");
                System.exit(1);
                return;
            }
        }
    } catch (IllegalArgumentException e) {
        System.err.println("Error parsing --genotypesFormat \"" + commandLine.getOptionValue("G")
                + "\" is not a valid input data format");
        System.exit(1);
        return;
    }

    final RandomAccessGenotypeData genotypeData;
    try {
        genotypeData = genotypeDataType.createFilteredGenotypeData(genotypesBasePaths, 100, null, null, null,
                0.8);
    } catch (TabixFileNotFoundException e) {
        LOGGER.fatal("Tabix file not found for input data at: " + e.getPath() + "\n"
                + "Please see README on how to create a tabix file");
        System.exit(1);
        return;
    } catch (IOException e) {
        LOGGER.fatal("Error reading input data: " + e.getMessage(), e);
        System.exit(1);
        return;
    } catch (IncompatibleMultiPartGenotypeDataException e) {
        LOGGER.fatal("Error combining the impute genotype data files: " + e.getMessage(), e);
        System.exit(1);
        return;
    } catch (GenotypeDataException e) {
        LOGGER.fatal("Error reading input data: " + e.getMessage(), e);
        System.exit(1);
        return;
    }

    ChrPosTreeMap<ArrayList<EQTL>> replicationQtls = new QTLTextFile(replicationQtlFilePath, false)
            .readQtlsAsTreeMap();

    int interactionSnpNotInGenotypeData = 0;
    int noReplicationQtlsInWindow = 0;
    int noReplicationQtlsInLd = 0;
    int multipleReplicationQtlsInLd = 0;
    int replicationTopSnpNotInGenotypeData = 0;

    final CSVWriter outputWriter = new CSVWriter(new FileWriter(new File(outputFilePath)), '\t', '\0');
    final String[] outputLine = new String[14];
    int c = 0;
    outputLine[c++] = "Chr";
    outputLine[c++] = "Pos";
    outputLine[c++] = "SNP";
    outputLine[c++] = "Gene";
    outputLine[c++] = "Module";
    outputLine[c++] = "DiscoveryZ";
    outputLine[c++] = "ReplicationZ";
    outputLine[c++] = "DiscoveryZCorrected";
    outputLine[c++] = "ReplicationZCorrected";
    outputLine[c++] = "DiscoveryAlleleAssessed";
    outputLine[c++] = "ReplicationAlleleAssessed";
    outputLine[c++] = "bestLd";
    outputLine[c++] = "bestLd_dist";
    outputLine[c++] = "nextLd";
    outputWriter.writeNext(outputLine);

    HashSet<String> notFound = new HashSet<>();

    CSVReader interactionQtlReader = new CSVReader(new FileReader(interactionQtlFilePath), '\t');
    interactionQtlReader.readNext(); // skip header
    String[] interactionQtlLine;
    while ((interactionQtlLine = interactionQtlReader.readNext()) != null) {
        String snp = interactionQtlLine[1];
        String chr = interactionQtlLine[2];
        int pos = Integer.parseInt(interactionQtlLine[3]);
        String gene = interactionQtlLine[4];
        String alleleAssessed = interactionQtlLine[9];
        String module = interactionQtlLine[12];
        double discoveryZ = Double.parseDouble(interactionQtlLine[10]);

        GeneticVariant interactionQtlVariant = genotypeData.getSnpVariantByPos(chr, pos);
        if (interactionQtlVariant == null) {
            System.err.println("Interaction QTL SNP not found in genotype data: " + chr + ":" + pos);
            ++interactionSnpNotInGenotypeData;
            continue;
        }

        EQTL bestMatch = null;
        double bestMatchR2 = Double.NaN;
        Ld bestMatchLd = null;
        double nextBestR2 = Double.NaN;

        ArrayList<EQTL> sameSnpQtls = replicationQtls.get(chr, pos);
        if (sameSnpQtls != null) {
            for (EQTL sameSnpQtl : sameSnpQtls) {
                if (sameSnpQtl.getProbe().equals(gene)) {
                    bestMatch = sameSnpQtl;
                    bestMatchR2 = 1;
                }
            }
        }

        NavigableMap<Integer, ArrayList<EQTL>> potentionalReplicationQtls = replicationQtls.getChrRange(chr,
                pos - window, true, pos + window, true);

        for (ArrayList<EQTL> potentialReplicationQtls : potentionalReplicationQtls.values()) {
            for (EQTL potentialReplicationQtl : potentialReplicationQtls) {
                if (!potentialReplicationQtl.getProbe().equals(gene)) {
                    continue;
                }
                GeneticVariant potentialReplicationQtlVariant = genotypeData.getSnpVariantByPos(
                        potentialReplicationQtl.getRsChr().toString(),
                        potentialReplicationQtl.getRsChrPos());
                if (potentialReplicationQtlVariant == null) {
                    notFound.add(potentialReplicationQtl.getRsChr().toString() + ":"
                            + potentialReplicationQtl.getRsChrPos());
                    ++replicationTopSnpNotInGenotypeData;
                    continue;
                }
                Ld ld = interactionQtlVariant.calculateLd(potentialReplicationQtlVariant);
                double r2 = ld.getR2();
                if (r2 > 1) {
                    r2 = 1;
                }
                if (bestMatch == null) {
                    bestMatch = potentialReplicationQtl;
                    bestMatchR2 = r2;
                    bestMatchLd = ld;
                } else if (r2 > bestMatchR2) {
                    bestMatch = potentialReplicationQtl;
                    nextBestR2 = bestMatchR2;
                    bestMatchR2 = r2;
                    bestMatchLd = ld;
                }
            }
        }

        double replicationZ = Double.NaN;
        double replicationZCorrected = Double.NaN;
        double discoveryZCorrected = Double.NaN;
        String replicationAlleleAssessed = null;
        if (bestMatch != null) {
            replicationZ = bestMatch.getZscore();
            replicationAlleleAssessed = bestMatch.getAlleleAssessed();
            if (pos != bestMatch.getRsChrPos()) {
                String commonHap = null;
                double commonHapFreq = -1;
                for (Map.Entry<String, Double> hapFreq : bestMatchLd.getHaplotypesFreq().entrySet()) {
                    double f = hapFreq.getValue();
                    if (f > commonHapFreq) {
                        commonHapFreq = f;
                        commonHap = hapFreq.getKey();
                    }
                }
                String[] commonHapAlleles = StringUtils.split(commonHap, '/');
                discoveryZCorrected = commonHapAlleles[0].equals(alleleAssessed) ? discoveryZ : discoveryZ * -1;
                replicationZCorrected = commonHapAlleles[1].equals(replicationAlleleAssessed) ? replicationZ
                        : replicationZ * -1;
            } else {
                discoveryZCorrected = discoveryZ;
                replicationZCorrected = alleleAssessed.equals(replicationAlleleAssessed) ? replicationZ
                        : replicationZ * -1;
            }
        }

        c = 0;
        outputLine[c++] = chr;
        outputLine[c++] = String.valueOf(pos);
        outputLine[c++] = snp;
        outputLine[c++] = gene;
        outputLine[c++] = module;
        outputLine[c++] = String.valueOf(discoveryZ);
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(replicationZ);
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(discoveryZCorrected);
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(replicationZCorrected);
        outputLine[c++] = alleleAssessed;
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(bestMatch.getAlleleAssessed());
        outputLine[c++] = String.valueOf(bestMatchR2);
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(Math.abs(pos - bestMatch.getRsChrPos()));
        outputLine[c++] = String.valueOf(nextBestR2);
        outputWriter.writeNext(outputLine);
    }

    outputWriter.close();

    for (String e : notFound) {
        System.err.println("Not found: " + e);
    }

    System.out.println("interactionSnpNotInGenotypeData: " + interactionSnpNotInGenotypeData);
    System.out.println("noReplicationQtlsInWindow: " + noReplicationQtlsInWindow);
    System.out.println("noReplicationQtlsInLd: " + noReplicationQtlsInLd);
    System.out.println("multipleReplicationQtlsInLd: " + multipleReplicationQtlsInLd);
    System.out.println("replicationTopSnpNotInGenotypeData: " + replicationTopSnpNotInGenotypeData);
}
From source file:net.bobah.mail.Dupes.java
public static void main(String[] args) throws Exception {
    installDefaultUncaughtExceptionHandler(log);

    final CommandLineParser parser = new PosixParser();
    final Options options = new Options()
            .addOption("j", "threads", true, "number of parallel threads to use for analyzing")
            .addOption("hash", true,
                    "hash function to use, possible values: " + Arrays.toString(Hashes.values()))
            .addOption("dir", true, "add directory to search");
    final CommandLine cmdline = parser.parse(options, args);

    final int threads = Integer.valueOf(
            cmdline.getOptionValue("threads", String.valueOf(Runtime.getRuntime().availableProcessors())));
    final HashFunction hash = Hashes.valueOf(cmdline.getOptionValue("hash", "adler32")).hashfunc;
    final File[] dirs = Collections2
            .transform(Arrays.asList(cmdline.getOptionValues("dir")), new Function<String, File>() {
                @Override
                public File apply(String from) {
                    return new File(from);
                }
            }).toArray(new File[] {});

    log.info("hash: {}, threads: {}, dirs: {} in total", hash, threads, dirs.length);

    try {
        new Dupes(threads, hash, dirs).run();
    } finally {
        Utils.shutdownLogger();
    }
}
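The "hash" option above illustrates a handy combination: EnumType.values() returns an array, so Arrays.toString lists the legal choices directly in a help message. A minimal sketch (this Hashes enum is a hypothetical stand-in for the one in the example):

import java.util.Arrays;

public class EnumHelpDemo {
    // Hypothetical stand-in for the Hashes enum in the example above
    enum Hashes { ADLER32, CRC32, MD5, SHA1 }

    public static void main(String[] args) {
        // values() returns Hashes[], so the enum names line up in the help text
        System.out.println("hash function to use, possible values: " + Arrays.toString(Hashes.values()));
        // prints: hash function to use, possible values: [ADLER32, CRC32, MD5, SHA1]
    }
}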
From source file:alluxio.yarn.Client.java
/**
 * @param args command line arguments
 */
public static void main(String[] args) {
    try {
        Client client = new Client();
        System.out.println("Initializing Client");
        if (!client.parseArgs(args)) {
            System.out.println("Cannot parse commandline: " + Arrays.toString(args));
            System.exit(0);
        }
        System.out.println("Starting Client");
        client.run();
    } catch (Exception e) {
        System.err.println("Error running Client " + e);
        System.exit(1);
    }
}