List of usage examples for java.util.ArrayList.add
public boolean add(E e)
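Before the project-specific examples below, here is a minimal, self-contained sketch of the method's contract (class name and values are illustrative only, not taken from any of the cited projects): add(E e) appends the given element to the end of the list, growing the backing array if necessary, and for ArrayList always returns true.

import java.util.ArrayList;

public class ArrayListAddExample {
    public static void main(String[] args) {
        ArrayList<String> names = new ArrayList<String>();
        boolean changed = names.add("alice"); // list is now [alice]
        names.add("bob");                     // list is now [alice, bob]
        System.out.println(changed);          // prints: true (ArrayList.add(E) always returns true)
        System.out.println(names);            // prints: [alice, bob]
    }
}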
From source file:geva.Operator.Operations.SinglePointCrossover.java
public static void main(String[] args) {
    SinglePointCrossover cop = new SinglePointCrossover(new MersenneTwisterFast(), 1);
    cop.setFixedCrossoverPoint(false);
    GEChromosome c1 = new GEChromosome(10);
    GEChromosome c2 = new GEChromosome(10);
    for (int i = 0; i < 20; i++) {
        c1.add(1);
        c2.add(2);
    }
    cop.makeNewChromosome(c1, c2, c1.size(), c2.size());
    System.out.println(c1.toString());
    System.out.println(c2.toString());

    Genotype g1 = new Genotype();
    Genotype g2 = new Genotype();
    g1.add(c1);
    g2.add(c2);

    GEIndividual i1 = new GEIndividual();
    GEIndividual i2 = new GEIndividual();
    i1.setMapper(new GEGrammar());
    i1.setGenotype(g1);
    i2.setMapper(new GEGrammar());
    i2.setGenotype(g2);

    ArrayList<Individual> aI = new ArrayList<Individual>(2);
    aI.add(i1);
    aI.add(i2);
    cop.doOperation(aI);

    System.out.println();
    System.out.println("Testing operation crossover");
    System.out.println();
    c1 = (GEChromosome) i1.getGenotype().get(0);
    c2 = (GEChromosome) i2.getGenotype().get(0);
    System.out.println(c1.toString());
    System.out.println(c2.toString());

    CrossoverModule cm = new CrossoverModule(new MersenneTwisterFast(), cop);
    SimplePopulation p = new SimplePopulation();
    p.add(i1);
    p.add(i2);
    cm.setPopulation(p);

    long st = System.currentTimeMillis();
    for (int i = 1; i < 100000000; i += 20) {
        cm.perform();
    }
    long et = System.currentTimeMillis();
    System.out.println("Done running: Total time(Ms) for " + 100000000 + " generations was" + (et - st));

    System.out.println();
    System.out.println("Testing module crossover");
    System.out.println();
    c1 = (GEChromosome) i1.getGenotype().get(0);
    c2 = (GEChromosome) i2.getGenotype().get(0);
    System.out.println(c1.toString());
    System.out.println(c2.toString());
}
From source file:at.tlphotography.jAbuseReport.Reporter.java
/**
 * The main method.
 *
 * @param args
 *            the arguments
 */
public static void main(String[] args) {
    parseArguments(args);

    File[] directory = new File(logDir).listFiles(); // get the files in the dir

    for (File file : directory) // iterate over the files
    {
        if (!file.isDirectory() && file.getName().contains(logNames)) // if the file is not a dir and the name contains the logName string
        {
            if (file.getName().endsWith(".gz")) // is it zipped?
            {
                content.putAll(readGZFile(file));
            } else {
                content.putAll(readLogFile(file));
            }
        }
    }

    // save the mails to the log lines
    HashMap<String, ArrayList<LogObject>> finalContent = new HashMap<>();
    Iterator<Entry<String, String>> it = content.entrySet().iterator();
    while (it.hasNext()) {
        Map.Entry<String, String> pair = it.next();
        String mail = whoIsLookUp(pair.getKey());

        if (finalContent.containsKey(mail)) {
            finalContent.get(mail).add(new LogObject(pair.getValue()));
        } else {
            ArrayList<LogObject> temp = new ArrayList<LogObject>();
            temp.add(new LogObject(pair.getValue()));
            finalContent.put(mail, temp);
        }
        it.remove();
    }

    // sort them
    Iterator<Entry<String, ArrayList<LogObject>>> it2 = finalContent.entrySet().iterator();
    while (it2.hasNext()) {
        Entry<String, ArrayList<LogObject>> pair = it2.next();
        Collections.sort(pair.getValue());

        println(pair.getKey() + " =");
        for (LogObject obj : pair.getValue()) {
            println(obj.logContent);
        }
        println("\n");
        it2.remove();
    }
}
From source file:deck36.storm.plan9.nodejs.ExtendedKittenRobbersTopology.java
public static void main(String[] args) throws Exception {

    String env = null;

    if (args != null && args.length > 0) {
        env = args[0];
    }

    if (!"dev".equals(env))
        if (!"prod".equals(env)) {
            System.out.println("Usage: $0 (dev|prod)\n");
            System.exit(1);
        }

    // Topology config
    Config conf = new Config();

    // Load parameters and add them to the Config
    Map configMap = YamlLoader.loadYamlFromResource("storm_" + env + ".yml");
    conf.putAll(configMap);

    log.info(JSONValue.toJSONString((conf)));

    // Set topology loglevel to DEBUG
    conf.put(Config.TOPOLOGY_DEBUG, JsonPath.read(conf, "$.deck36_storm.debug"));

    // Create Topology builder
    TopologyBuilder builder = new TopologyBuilder();

    // If there are no special reasons, start with a parallelism hint of 1
    // and multiple tasks. That way, you can scale dynamically later on.
    int parallelism_hint = JsonPath.read(conf, "$.deck36_storm.default_parallelism_hint");
    int num_tasks = JsonPath.read(conf, "$.deck36_storm.default_num_tasks");

    String badgeName = ExtendedKittenRobbersTopology.class.getSimpleName();

    // Create a stream from RabbitMQ messages:
    // bind a new queue with the name of the topology
    // to the main plan9 exchange (from the properties config),
    // consuming only CBT-related events by using the routing key 'cbt.#'
    String rabbitQueueName = badgeName; // use topology class name as name for the queue
    String rabbitExchangeName = JsonPath.read(conf,
            "$.deck36_storm.ExtendedKittenRobbersBolt.rabbitmq.exchange");
    String rabbitRoutingKey = JsonPath.read(conf,
            "$.deck36_storm.ExtendedKittenRobbersBolt.rabbitmq.routing_key");

    // Get JSON deserialization scheme
    Scheme rabbitScheme = new SimpleJSONScheme();

    // Setup a Declarator to configure exchange/queue/routing key
    RabbitMQDeclarator rabbitDeclarator = new RabbitMQDeclarator(rabbitExchangeName, rabbitQueueName,
            rabbitRoutingKey);

    // Create Configuration for the Spout
    ConnectionConfig connectionConfig = new ConnectionConfig(
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.host"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.port"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.user"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.pass"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.vhost"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.heartbeat"));

    ConsumerConfig spoutConfig = new ConsumerConfigBuilder().connection(connectionConfig)
            .queue(rabbitQueueName)
            .prefetch((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"))
            .requeueOnFail()
            .build();

    // Add global parameters to the topology config - the RabbitMQSpout will read them from there
    conf.putAll(spoutConfig.asMap());

    // For production, set the spout pending value to the same value as the RabbitMQ pre-fetch
    // see: https://github.com/ppat/storm-rabbitmq/blob/master/README.md
    if ("prod".equals(env)) {
        conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING,
                (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"));
    }

    // Add RabbitMQ spout to topology
    builder.setSpout("incoming", new RabbitMQSpout(rabbitScheme, rabbitDeclarator), parallelism_hint)
            .setNumTasks((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.spout_tasks"));

    // Construct the command to invoke the external bolt implementation
    ArrayList<String> command = new ArrayList(15);

    // Add main execution program (php, hhvm, zend, ..) and parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.nodejs.executor"));

    // Add main route to be invoked and its parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.ExtendedKittenRobbersBolt.main"));
    List boltParams = (List<String>) JsonPath.read(conf, "$.deck36_storm.ExtendedKittenRobbersBolt.params");
    if (boltParams != null)
        command.addAll(boltParams);

    // Log the final command
    log.info("Command to start bolt for Extended Kitten Robbers From Outer Space: "
            + Arrays.toString(command.toArray()));

    /* We need to use the tick tuple adapter instead of the general adapter:

    // Add constructed external bolt command to topology using MultilangAdapterTickTupleBolt
    builder.setBolt("badge",
            new MultilangAdapterTickTupleBolt(
                    command,
                    (Integer) JsonPath.read(conf, "$.deck36_storm.ExtendedKittenRobbersBolt.robber_frequency"),
                    "badge"),
            parallelism_hint)
            .setNumTasks(num_tasks)
            .shuffleGrouping("incoming");
    */

    builder.setBolt("rabbitmq_router",
            new Plan9RabbitMQRouterBolt(
                    (String) JsonPath.read(conf,
                            "$.deck36_storm.ExtendedKittenRobbersBolt.rabbitmq.target_exchange"),
                    "KittenRobbers" // RabbitMQ routing key
            ), parallelism_hint).setNumTasks(num_tasks).shuffleGrouping("badge");

    builder.setBolt("rabbitmq_producer", new Plan9RabbitMQPushBolt(), parallelism_hint).setNumTasks(num_tasks)
            .shuffleGrouping("rabbitmq_router");

    if ("dev".equals(env)) {
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(badgeName + System.currentTimeMillis(), conf, builder.createTopology());
        Thread.sleep(2000000);
    }

    if ("prod".equals(env)) {
        StormSubmitter.submitTopology(badgeName + "-" + System.currentTimeMillis(), conf,
                builder.createTopology());
    }
}
From source file:visualize.Visualize.java
public static void main(String[] args) throws NotEnoughDataPointsException, IllDefinedDataPointsException {
    XYSeries seriesQ = new XYSeries("quadratic");
    XYSeries seriesL = new XYSeries("linear");
    XYSeries seriesI = new XYSeries("intepolated");

    final ArrayList<Point> pointsQ = new ArrayList<Point>();

    for (double x = -5.0; x <= 5.0; x = x + 0.5)
        pointsQ.add(new Point(new double[] { x, 2.0 * x * x * x - 10 * x * x }));

    final LinearFunction fl = new LinearFunction();
    final HigherOrderPolynomialFunction fq = new HigherOrderPolynomialFunction(3);
    final InterpolatedPolynomial<LinearFunction, HigherOrderPolynomialFunction> fi = new InterpolatedPolynomial<LinearFunction, HigherOrderPolynomialFunction>(
            new LinearFunction(), fq.copy(), 0.5);

    fl.fitFunction(pointsQ);
    fq.fitFunction(pointsQ);
    fi.fitFunction(pointsQ);

    System.out.println(fl);
    System.out.println(fq);
    System.out.println(fi.interpolatedFunction);

    for (double x = -5.0; x <= 5.0; x = x + 0.5) {
        seriesQ.add(x, fq.predict(x));
        seriesL.add(x, fl.predict(x));
        seriesI.add(x, fi.predict(x));
    }

    XYSeriesCollection dataset = new XYSeriesCollection();
    dataset.addSeries(seriesQ);
    dataset.addSeries(seriesL);
    dataset.addSeries(seriesI);

    JFreeChart chart = ChartFactory.createXYLineChart("XY Chart", "x-axis", "y-axis", dataset,
            PlotOrientation.VERTICAL, true, true, false);

    final XYPlot plot = chart.getXYPlot();
    final XYItemRenderer renderer = plot.getRenderer();
    renderer.setSeriesPaint(0, new Color(0, 0, 255));
    renderer.setSeriesStroke(0, new BasicStroke(0.5f));
    renderer.setSeriesPaint(1, new Color(255, 0, 0));
    renderer.setSeriesStroke(1, new BasicStroke(0.5f));
    renderer.setSeriesPaint(2, new Color(0, 200, 40));
    renderer.setSeriesStroke(2, new BasicStroke(1.5f));
    //chart.getXYPlot().setRenderer(new XYSplineRenderer(100));

    JPanel panel = new JPanel();
    ChartPanel chartPanel = new ChartPanel(chart);
    panel.add(chartPanel);

    JFrame frame = new JFrame();
    frame.setContentPane(panel);
    frame.validate();
    Dimension d = new Dimension(800, 500);
    frame.setSize(d);
    frame.setVisible(true);

    try {
        Thread.sleep(3000);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }

    System.out.println("starting");

    for (int lambda = 0; lambda <= 100; ++lambda) {
        fi.setLambda(lambda / 100.0);
        fi.fitFunction(pointsQ);
        System.out.println(fi.interpolatedFunction);

        dataset.getSeries(2).clear();
        for (double x = -5.0; x <= 5.0; x = x + 0.5)
            seriesI.add(x, fi.predict(x));

        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        // makeScreenshot( lambda );
    }
}
From source file:iac.cnr.it.TestSearcher.java
public static void main(String[] args) throws IOException, ParseException {
    /** Command line parser and options */
    CommandLineParser parser = new PosixParser();
    Options options = new Options();

    options.addOption(OPT_INDEX, true, "Index path");
    options.addOption(OPT_QUERY, true, "The query");

    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (org.apache.commons.cli.ParseException e) {
        logger.fatal("Error while parsing command line arguments");
        System.exit(1);
    }

    /** Check for mandatory options */
    if (!cmd.hasOption(OPT_INDEX) || !cmd.hasOption(OPT_QUERY)) {
        usage();
        System.exit(0);
    }

    /** Read options */
    File casePath = new File(cmd.getOptionValue(OPT_INDEX));
    String query = cmd.getOptionValue(OPT_QUERY);

    /** Check correctness of the path containing an ISODAC case */
    if (!casePath.exists() || !casePath.isDirectory()) {
        logger.fatal("The case directory \"" + casePath.getAbsolutePath() + "\" is not valid");
        System.exit(1);
    }

    /** Check existence of the info.dat file */
    File infoFile = new File(casePath, INFO_FILENAME);
    if (!infoFile.exists()) {
        logger.fatal("Can't find " + INFO_FILENAME + " within the case directory (" + casePath + ")");
        System.exit(1);
    }

    /** Load the mapping image_uuid --> image_filename */
    imagesMap = new HashMap<Integer, String>();
    BufferedReader reader = new BufferedReader(new FileReader(infoFile));
    while (reader.ready()) {
        String line = reader.readLine();
        logger.info("Read the line: " + line);

        String currentID = line.split("\t")[0];
        String currentImgFile = line.split("\t")[1];
        imagesMap.put(Integer.parseInt(currentID), currentImgFile);

        logger.info("ID: " + currentID + " - IMG: " + currentImgFile + " added to the map");
    }
    reader.close();

    /** Load all the directories containing an index */
    ArrayList<String> indexesDirs = new ArrayList<String>();
    for (File f : casePath.listFiles()) {
        logger.info("Analyzing: " + f);
        if (f.isDirectory())
            indexesDirs.add(f.getAbsolutePath());
    }
    logger.info(indexesDirs.size() + " directories found!");

    /** Set-up the searcher */
    Searcher searcher = null;
    try {
        String[] array = indexesDirs.toArray(new String[indexesDirs.size()]);
        searcher = new Searcher(array);

        TopDocs results = searcher.search(query, Integer.MAX_VALUE);
        ScoreDoc[] hits = results.scoreDocs;
        int numTotalHits = results.totalHits;

        System.out.println(numTotalHits + " total matching documents");

        for (int i = 0; i < numTotalHits; i++) {
            Document doc = searcher.doc(hits[i].doc);
            String path = doc.get(FIELD_PATH);
            String filename = doc.get(FIELD_FILENAME);
            String image_uuid = doc.get(FIELD_IMAGE_ID);

            if (path != null) {
                //System.out.println((i + 1) + ". " + path + File.separator + filename + " - score: " + hits[i].score);
                //System.out.println((i + 1) + ". " + path + File.separator + filename + " - image_file: " + image_uuid);
                System.out.println((i + 1) + ". " + path + File.separator + filename + " - image_file: "
                        + imagesMap.get(Integer.parseInt(image_uuid)));
            } else {
                System.out.println((i + 1) + ". " + "No path for this document");
            }
        }
    } catch (Exception e) {
        System.err.println("An error occurred: " + e.getMessage());
        e.printStackTrace();
    } finally {
        if (searcher != null)
            searcher.close();
    }
}
From source file:de.prozesskraft.pkraft.Clone.java
public static void main(String[] args) throws org.apache.commons.cli.ParseException, IOException {

    /*----------------------------
      get options from ini-file
    ----------------------------*/
    java.io.File inifile = new java.io.File(
            WhereAmI.getInstallDirectoryAbsolutePath(Clone.class) + "/" + "../etc/pkraft-clone.ini");

    if (inifile.exists()) {
        try {
            ini = new Ini(inifile);
        } catch (InvalidFileFormatException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
    } else {
        System.err.println("ini file does not exist: " + inifile.getAbsolutePath());
        System.exit(1);
    }

    /*----------------------------
      create boolean options
    ----------------------------*/
    Option ohelp = new Option("help", "print this message");
    Option ov = new Option("v", "prints version and build-date");

    /*----------------------------
      create argument options
    ----------------------------*/
    Option oinstance = OptionBuilder.withArgName("File").hasArg()
            .withDescription("[mandatory] process you want to clone.")
            // .isRequired()
            .create("instance");

    Option obasedir = OptionBuilder.withArgName("DIR").hasArg().withDescription(
            "[optional, default: <basedirOfInstance>] base directory you want to place the root directory of the clone. this directory must exist at call time.")
            // .isRequired()
            .create("basedir");

    /*----------------------------
      create options object
    ----------------------------*/
    Options options = new Options();
    options.addOption(ohelp);
    options.addOption(ov);
    options.addOption(oinstance);
    options.addOption(obasedir);

    /*----------------------------
      create the parser
    ----------------------------*/
    CommandLineParser parser = new GnuParser();

    // parse the command line arguments
    commandline = parser.parse(options, args);

    /*----------------------------
      usage/help
    ----------------------------*/
    if (commandline.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("clone", options);
        System.exit(0);
    }

    if (commandline.hasOption("v")) {
        System.out.println("author: alexander.vogel@prozesskraft.de");
        System.out.println("version: [% version %]");
        System.out.println("date: [% date %]");
        System.exit(0);
    }

    /*----------------------------
      check whether a bad combination of parameters was given
    ----------------------------*/
    if (!(commandline.hasOption("instance"))) {
        System.err.println("option -instance is mandatory");
        exiter();
    }

    /*----------------------------
      check the license and abort if necessary
    ----------------------------*/
    // check for valid license
    ArrayList<String> allPortAtHost = new ArrayList<String>();
    allPortAtHost.add(ini.get("license-server", "license-server-1"));
    allPortAtHost.add(ini.get("license-server", "license-server-2"));
    allPortAtHost.add(ini.get("license-server", "license-server-3"));

    MyLicense lic = new MyLicense(allPortAtHost, "1", "user-edition", "0.1");

    // print the license log
    for (String actLine : (ArrayList<String>) lic.getLog()) {
        System.err.println(actLine);
    }

    // abort if the license is not valid
    if (!lic.isValid()) {
        System.exit(1);
    }

    /*----------------------------
      the actual business logic
    ----------------------------*/
    String pathToInstance = commandline.getOptionValue("instance");
    java.io.File fileInstance = new java.io.File(pathToInstance);
    java.io.File fileBaseDir = null;

    // if it does not exist, abort with an error message
    if (!fileInstance.exists()) {
        System.err.println("instance file does not exist.");
        exiter();
    }

    // check an optionally given basedir
    if (commandline.hasOption("basedir")) {
        fileBaseDir = new java.io.File(commandline.getOptionValue("basedir"));
        if (!fileBaseDir.exists()) {
            System.err.println("error: -basedir: directory does not exist");
            exiter();
        }
        if (!fileBaseDir.isDirectory()) {
            System.err.println("error: -basedir: is not a directory");
            exiter();
        }
    }

    // read the main process again so that its subprocesses can be extracted
    Process p1 = new Process();
    p1.setInfilebinary(pathToInstance);
    Process process = p1.readBinary();

    // set directories, if given
    if (fileBaseDir != null) {
        process.setBaseDir(fileBaseDir.getCanonicalPath());
    }

    // clone the main process via the static function
    Process clonedProcess = cloneProcess(process, null);

    // walk through all steps and, if subprocesses exist, clone them as well
    for (Step actStep : process.getStep()) {
        if (actStep.getSubprocess() != null) {
            Process pDummy = new Process();
            pDummy.setInfilebinary(actStep.getAbsdir() + "/process.pmb");
            Process processInSubprocess = pDummy.readBinary();
            // System.err.println("info: reading process freshly from file: " + actStep.getAbsdir() + "/process.pmb");
            if (processInSubprocess != null) {
                cloneProcess(processInSubprocess, clonedProcess);
            }
        }
    }
}
From source file:edu.oregonstate.eecs.mcplan.domains.yahtzee2.subtask.StraightMdp.java
public static void main(final String[] argv) throws FileNotFoundException {
    //  final StraightMdp M = new StraightMdp( false );
    //  final YahtzeeDiceState s = new YahtzeeDiceState( new Hand( new int[] { 0, 2, 2, 1, 0, 0 } ), 1 );
    //  final KeepAction a = new KeepAction( new int[] { 0, 1, 1, 0, 0, 0 } );
    //
    //  final Pair<ArrayList<YahtzeeDiceState>, ArrayList<Double>> P = M.sparseP( s, a );
    //
    //  for( int i = 0; i < P.first.size(); ++i ) {
    //      System.out.println( P.first.get( i ) + " (" + P.second.get( i ) + ")" );
    //  }

    final RandomGenerator rng = new MersenneTwister(42);
    final double discount = 1.0;
    final boolean small = false;
    final StraightMdp mdp = new StraightMdp(small);
    final int Nfeatures = Hand.Nfaces + 1; // +1 for rerolls

    final SparseValueIterationSolver<YahtzeeDiceState, YahtzeeAction> vi = new SparseValueIterationSolver<YahtzeeDiceState, YahtzeeAction>(
            mdp, discount, 1e-16);
    vi.run();

    final ArrayList<Attribute> attr = new ArrayList<Attribute>();
    attr.addAll(YahtzeeSubtaskStateSpace.attributes());
    attr.add(WekaUtil.createNominalAttribute("__label__", mdp.A().cardinality()));
    final Instances instances = WekaUtil
            .createEmptyInstances("yahtzee_straight_" + (small ? "small" : "large") + "_pistar", attr);

    final Policy<YahtzeeDiceState, YahtzeeAction> pistar = vi.pistar();
    final Generator<YahtzeeDiceState> g = mdp.S().generator();
    while (g.hasNext()) {
        final YahtzeeDiceState s = g.next();
        if (s.isTerminal()) {
            continue;
        }
        pistar.setState(s, 0L);
        final YahtzeeAction astar = pistar.getAction();
        System.out.println("" + s + " -> " + astar);

        final double[] phi = new double[Nfeatures + 1];
        int idx = 0;
        for (int i = 0; i < Hand.Nfaces; ++i) {
            phi[idx++] = s.hand.dice[i];
        }
        phi[idx++] = s.rerolls;
        phi[Nfeatures] = mdp.A().index(astar);
        WekaUtil.addInstance(instances, new DenseInstance(1.0, phi));
    }

    WekaUtil.writeDataset(new File("."), instances);

    final Csv.Writer csv = new Csv.Writer(
            new PrintStream(new FileOutputStream(new File(instances.relationName() + "_action-key.csv"))));
    for (final Map.Entry<ValueType<int[]>, Integer> e : YahtzeeSubtaskActionSpace.index_map.entrySet()) {
        csv.cell(e.getValue()).cell(new KeepAction(e.getKey().get())).newline();
    }

    //  final MeanVarianceAccumulator ret = new MeanVarianceAccumulator();
    //  final MeanVarianceAccumulator steps = new MeanVarianceAccumulator();
    //  final int Ngames = 100000;
    //  for( int i = 0; i < Ngames; ++i ) {
    //      final FuelWorldState s0;
    //      if( choices ) {
    //          s0 = FuelWorldState.createDefaultWithChoices( rng );
    //      }
    //      else {
    //          s0 = FuelWorldState.createDefault( rng );
    //      }
    //      final FuelWorldSimulator sim = new FuelWorldSimulator( s0 );
    //
    //      final Episode<FuelWorldState, FuelWorldAction> episode
    //          = new Episode<FuelWorldState, FuelWorldAction>( sim, JointPolicy.create( pistar ) );
    //      final RewardAccumulator<FuelWorldState, FuelWorldAction> racc
    //          = new RewardAccumulator<FuelWorldState, FuelWorldAction>( sim.nagents(), discount );
    //      episode.addListener( racc );
    //
    //      final long tstart = System.nanoTime();
    //      episode.run();
    //      final long tend = System.nanoTime();
    //      final double elapsed_ms = (tend - tstart) * 1e-6;
    //
    //      ret.add( racc.v()[0] );
    //      steps.add( racc.steps() );
    //  }
    //
    //  System.out.println( "****************************************" );
    //  System.out.println( "Average return: " + ret.mean() );
    //  System.out.println( "Return variance: " + ret.variance() );
    //  System.out.println( "Confidence: " + ret.confidence() );
    //  System.out.println( "Steps (mean): " + steps.mean() );
    //  System.out.println( "Steps (var): " + steps.variance() );
}
From source file:edu.harvard.i2b2.explorer.dataModel.PDORequestMessageModel.java
public static void main(String[] args) throws Exception {
    PDORequestMessageModel pdoFactory = new PDORequestMessageModel();

    String conceptPath = new String(
            "\\RPDR\\Labtests\\LAB\\(LLB16) Chemistry\\(LLB21) General Chemistries\\CA");
    ArrayList<String> paths = new ArrayList<String>();
    paths.add(conceptPath);

    conceptPath = new String("\\RPDR\\Labtests\\LAB\\(LLB16) Chemistry\\(LLB21) General Chemistries\\GGT");
    paths.add(conceptPath);

    ArrayList<String> ppaths = new ArrayList<String>();
    conceptPath = new String("\\Providers\\BWH");
    ppaths.add(conceptPath);

    pdoFactory.requestXmlMessage(null, "1545", new Integer(0), new Integer(10), false, "");
}
From source file:akori.AKORI.java
public static void main(String[] args) throws IOException, InterruptedException {
    System.out.println("esto es AKORI");

    URL = "http://www.mbauchile.cl";
    PATH = "E:\\NetBeansProjects\\AKORI\\";
    NAME = "mbauchile.png";

    // Extract the DOM tree
    Document doc = Jsoup.connect(URL).timeout(0).get();

    // The Firefox driver supports javascript
    WebDriver driver = new FirefoxDriver();
    driver.manage().window().maximize();
    System.out.println(driver.manage().window().getSize().toString());
    System.out.println(driver.manage().window().getPosition().toString());
    int xmax = driver.manage().window().getSize().width;
    int ymax = driver.manage().window().getSize().height;

    // Go to the URL page
    driver.get(URL);

    File screen = ((TakesScreenshot) driver).getScreenshotAs(OutputType.FILE);
    FileUtils.copyFile(screen, new File(PATH + NAME));

    BufferedImage img = ImageIO.read(new File(PATH + NAME));
    //Graphics2D graph = img.createGraphics();
    BufferedImage img1 = new BufferedImage(xmax, ymax, BufferedImage.TYPE_INT_ARGB);
    Graphics2D graph1 = img.createGraphics();
    double[][] matrix = new double[ymax][xmax];
    BufferedReader in = new BufferedReader(new FileReader("et.txt"));
    String linea;
    double max = 0;
    graph1.drawImage(img, 0, 0, null);
    HashMap<String, Integer> lista = new HashMap<String, Integer>();
    int count = 0;
    for (int i = 0; (linea = in.readLine()) != null && i < 10000; ++i) {
        String[] datos = linea.split(",");
        int x = (int) Double.parseDouble(datos[0]);
        int y = (int) Double.parseDouble(datos[2]);
        long time = Double.valueOf(datos[4]).longValue();
        if (x >= xmax || y >= ymax)
            continue;
        if (time < 691215)
            continue;
        if (time > 705648)
            break;
        if (lista.containsKey(x + "," + y))
            lista.put(x + "," + y, lista.get(x + "," + y) + 1);
        else
            lista.put(x + "," + y, 1);
        ++count;
    }
    System.out.println(count);
    in.close();

    Iterator iter = lista.entrySet().iterator();
    Map.Entry e;
    for (String key : lista.keySet()) {
        Integer i = lista.get(key);
        if (max < i)
            max = i;
    }
    System.out.println(max);
    max = 0;
    while (iter.hasNext()) {
        e = (Map.Entry) iter.next();
        String xy = (String) e.getKey();
        String[] datos = xy.split(",");
        int x = Integer.parseInt(datos[0]);
        int y = Integer.parseInt(datos[1]);
        matrix[y][x] += (int) e.getValue();
        double aux;
        if ((aux = normalMatrix(matrix, y, x, ((int) e.getValue()) * 4)) > max) {
            max = aux;
        }
        //normalMatrix(matrix,x,y,20);
        if (matrix[y][x] > max)
            max = matrix[y][x];
    }

    int A, R, G, B, n;
    for (int i = 0; i < xmax; ++i) {
        for (int j = 0; j < ymax; ++j) {
            if (matrix[j][i] != 0) {
                n = (int) Math.round(matrix[j][i] * 100 / max);
                R = Math.round((255 * n) / 100);
                G = Math.round((255 * (100 - n)) / 100);
                B = 0;
                A = Math.round((255 * n) / 100);
                if (R > 255)
                    R = 255;
                if (R < 0)
                    R = 0;
                if (G > 255)
                    G = 255;
                if (G < 0)
                    G = 0;
                if (R < 50)
                    A = 0;
                graph1.setColor(new Color(R, G, B, A));
                graph1.fillOval(i, j, 1, 1);
            }
        }
    }
    //graph1.dispose();
    ImageIO.write(img, "png", new File("example.png"));
    System.out.println(max);

    graph1.setColor(Color.RED);

    // Extract elements
    Elements e1 = doc.body().getAllElements();
    int i = 1;
    ArrayList<String> tags = new ArrayList<String>();
    for (Element temp : e1) {
        if (tags.indexOf(temp.tagName()) == -1) {
            tags.add(temp.tagName());
            List<WebElement> query = driver.findElements(By.tagName(temp.tagName()));
            for (WebElement temp1 : query) {
                Point po = temp1.getLocation();
                Dimension d = temp1.getSize();
                if (d.width <= 0 || d.height <= 0 || po.x < 0 || po.y < 0)
                    continue;
                System.out.println(i + " " + temp.nodeName());
                System.out.println("  x: " + po.x + " y: " + po.y);
                System.out.println("  width: " + d.width + " height: " + d.height);
                graph1.draw(new Rectangle(po.x, po.y, d.width, d.height));
                ++i;
            }
        }
    }
    graph1.dispose();
    ImageIO.write(img, "png", new File(PATH + NAME));
    driver.quit();
}
From source file:deck36.storm.plan9.php.RecordBreakerBadgeTopology.java
public static void main(String[] args) throws Exception {

    String env = null;

    if (args != null && args.length > 0) {
        env = args[0];
    }

    if (!"dev".equals(env))
        if (!"prod".equals(env)) {
            System.out.println("Usage: $0 (dev|prod)\n");
            System.exit(1);
        }

    // Topology config
    Config conf = new Config();

    // Load parameters and add them to the Config
    Map configMap = YamlLoader.loadYamlFromResource("config_" + env + ".yml");
    conf.putAll(configMap);

    log.info(JSONValue.toJSONString((conf)));

    // Set topology loglevel to DEBUG
    conf.put(Config.TOPOLOGY_DEBUG, JsonPath.read(conf, "$.deck36_storm.debug"));

    // Create Topology builder
    TopologyBuilder builder = new TopologyBuilder();

    // If there are no special reasons, start with a parallelism hint of 1
    // and multiple tasks. That way, you can scale dynamically later on.
    int parallelism_hint = JsonPath.read(conf, "$.deck36_storm.default_parallelism_hint");
    int num_tasks = JsonPath.read(conf, "$.deck36_storm.default_num_tasks");

    // Create a stream from RabbitMQ messages:
    // bind a new queue with the name of the topology
    // to the main plan9 exchange (from the properties config),
    // consuming only CBT-related events by using the routing key 'cbt.#'
    String badgeName = RecordBreakerBadgeTopology.class.getSimpleName();

    String rabbitQueueName = badgeName; // use topology class name as name for the queue
    String rabbitExchangeName = JsonPath.read(conf, "$.deck36_storm.RecordBreakerBolt.rabbitmq.exchange");
    String rabbitRoutingKey = JsonPath.read(conf, "$.deck36_storm.RecordBreakerBolt.rabbitmq.routing_key");

    // Get JSON deserialization scheme
    Scheme rabbitScheme = new SimpleJSONScheme();

    // Setup a Declarator to configure exchange/queue/routing key
    RabbitMQDeclarator rabbitDeclarator = new RabbitMQDeclarator(rabbitExchangeName, rabbitQueueName,
            rabbitRoutingKey);

    // Create Configuration for the Spout
    ConnectionConfig connectionConfig = new ConnectionConfig(
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.host"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.port"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.user"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.pass"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.vhost"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.heartbeat"));

    ConsumerConfig spoutConfig = new ConsumerConfigBuilder().connection(connectionConfig)
            .queue(rabbitQueueName)
            .prefetch((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"))
            .requeueOnFail()
            .build();

    // Add global parameters to the topology config - the RabbitMQSpout will read them from there
    conf.putAll(spoutConfig.asMap());

    // For production, set the spout pending value to the same value as the RabbitMQ pre-fetch
    // see: https://github.com/ppat/storm-rabbitmq/blob/master/README.md
    if ("prod".equals(env)) {
        conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING,
                (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"));
    }

    // Add RabbitMQ spout to topology
    builder.setSpout("incoming", new RabbitMQSpout(rabbitScheme, rabbitDeclarator), parallelism_hint)
            .setNumTasks((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.spout_tasks"));

    // Construct the command to invoke the external bolt implementation
    ArrayList<String> command = new ArrayList(15);

    // Add main execution program (php, hhvm, zend, ..) and parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.php.executor"));
    command.addAll((List<String>) JsonPath.read(conf, "$.deck36_storm.php.executor_params"));

    // Add main command to be executed (app/console, the phar file, etc.) and global context parameters (environment etc.)
    command.add((String) JsonPath.read(conf, "$.deck36_storm.php.main"));
    command.addAll((List<String>) JsonPath.read(conf, "$.deck36_storm.php.main_params"));

    // Create command to execute the RecordBreakerBolt
    ArrayList<String> recordBreakerBoltCommand = new ArrayList<String>(command);

    // Add main route to be invoked and its parameters
    recordBreakerBoltCommand.add((String) JsonPath.read(conf, "$.deck36_storm.RecordBreakerBolt.main"));
    List boltParams = (List<String>) JsonPath.read(conf, "$.deck36_storm.RecordBreakerBolt.params");
    if (boltParams != null)
        recordBreakerBoltCommand.addAll(boltParams);

    // Create command to execute the RecordMasterBolt
    ArrayList<String> recordMasterBoltCommand = new ArrayList<String>(command);

    // Add main route to be invoked and its parameters
    recordMasterBoltCommand.add((String) JsonPath.read(conf, "$.deck36_storm.RecordMasterBolt.main"));
    boltParams = (List<String>) JsonPath.read(conf, "$.deck36_storm.RecordMasterBolt.params");
    if (boltParams != null)
        recordMasterBoltCommand.addAll(boltParams);

    // Log the final commands
    log.info("Command to start bolt for RecordBreaker badge: "
            + Arrays.toString(recordBreakerBoltCommand.toArray()));
    log.info("Command to start bolt for RecordMaster badge: "
            + Arrays.toString(recordMasterBoltCommand.toArray()));

    // Add constructed external bolt commands to the topology using MultilangAdapterBolt.
    // The RecordBreaker reads the incoming messages from the game application, i.e. the "incoming" spout
    builder.setBolt("record_breaker", new MultilangAdapterBolt(recordBreakerBoltCommand, "badge"),
            parallelism_hint).setNumTasks(num_tasks).shuffleGrouping("incoming");

    // The RecordMaster reads the badge messages generated by the RecordBreakerBolt
    builder.setBolt("record_master", new MultilangAdapterBolt(recordMasterBoltCommand, "badge"),
            parallelism_hint).setNumTasks(num_tasks).shuffleGrouping("record_breaker");

    // The RabbitMQ router bolt can read messages from both the RecordBreakerBolt and the RecordMasterBolt,
    // and forward those messages to the broker
    builder.setBolt("rabbitmq_router",
            new Plan9RabbitMQRouterBolt(
                    (String) JsonPath.read(conf, "$.deck36_storm.RecordBreakerBolt.rabbitmq.target_exchange"),
                    "RecordBreakerMaster" // RabbitMQ routing key
            ), parallelism_hint).setNumTasks(num_tasks).shuffleGrouping("record_breaker")
            .shuffleGrouping("record_master");

    builder.setBolt("rabbitmq_producer", new Plan9RabbitMQPushBolt(), parallelism_hint).setNumTasks(num_tasks)
            .shuffleGrouping("rabbitmq_router");

    if ("dev".equals(env)) {
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(badgeName + System.currentTimeMillis(), conf, builder.createTopology());
        Thread.sleep(2000000);
    }

    if ("prod".equals(env)) {
        StormSubmitter.submitTopology(badgeName + "-" + System.currentTimeMillis(), conf,
                builder.createTopology());
    }
}