List of usage examples for java.util.Map.get
V get(Object key);
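Before the longer, real-world listings below, here is a minimal, self-contained sketch (the class name and map contents are illustrative, not taken from any of the examples) of the basic contract of Map.get: it returns the value mapped to the key, or null when the key is absent, so callers usually null-check the result before unboxing or dereferencing it.

import java.util.HashMap;
import java.util.Map;

public class MapGetExample {

    public static void main(String[] args) {
        Map<String, Integer> wordCounts = new HashMap<String, Integer>();
        wordCounts.put("alpha", 3);
        wordCounts.put("beta", 5);

        // Present key: get returns the mapped value
        Integer alpha = wordCounts.get("alpha"); // 3

        // Absent key: get returns null, so guard before unboxing
        Integer gamma = wordCounts.get("gamma"); // null
        int safeGamma = (gamma != null) ? gamma : 0;

        System.out.println("alpha -> " + alpha);
        System.out.println("gamma -> " + safeGamma);
    }
}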
From source file:es.upm.dit.xsdinferencer.XSDInferencer.java
/**
 * Main method, executed when the tool is invoked as a standalone application
 * @param args an array with all the arguments passed to the application
 * @throws XSDConfigurationException if there is a problem regarding the configuration
 * @throws IOException if there is an I/O problem while reading the input XML files or writing the output files
 * @throws JDOMException if there is any problem while parsing the input XML files
 */
public static void main(String[] args) throws Exception {
    if (Arrays.asList(args).contains("--help")) {
        printHelp();
        System.exit(0);
    }
    try {
        XSDInferencer inferencer = new XSDInferencer();
        Results results = inferencer.inferSchema(args);
        Map<String, String> xsdsAsXMLStrings = results.getXSDsAsStrings();
        Map<String, String> jsonsAsStrings = results.getJsonSchemasAsStrings();
        Map<String, String> schemasAsStrings = xsdsAsXMLStrings != null ? xsdsAsXMLStrings : jsonsAsStrings;
        Map<String, String> statisticsDocumentsAsXMLStrings = results.getStatisticsAsStrings();
        File outputDirectory = null;
        for (int i = 0; i < args.length; i++) {
            if (!args[i].equalsIgnoreCase("--" + KEY_OUTPUT_DIRECTORY))
                continue;
            // check the bounds before reading args[i + 1]
            if (i == args.length - 1 || args[i + 1].startsWith("--"))
                throw new IllegalArgumentException("Output directory parameter bad specified");
            outputDirectory = new File(args[i + 1]);
            if (!outputDirectory.exists())
                throw new FileNotFoundException("Output directory not found.");
            if (!outputDirectory.isDirectory())
                throw new NotDirectoryException(outputDirectory.getPath());
        }
        if (outputDirectory != null) {
            System.out.println("Writing results to " + outputDirectory.getAbsolutePath());
            for (String name : schemasAsStrings.keySet()) {
                File currentOutpuFile = new File(outputDirectory, name);
                FileOutputStream fOs = new FileOutputStream(currentOutpuFile);
                BufferedWriter bWriter = new BufferedWriter(new OutputStreamWriter(fOs, Charsets.UTF_8));
                bWriter.write(schemasAsStrings.get(name));
                bWriter.flush();
                bWriter.close();
            }
            if (statisticsDocumentsAsXMLStrings != null) {
                for (String name : statisticsDocumentsAsXMLStrings.keySet()) {
                    File currentOutpuFile = new File(outputDirectory, name);
                    FileWriter fWriter = new FileWriter(currentOutpuFile);
                    BufferedWriter bWriter = new BufferedWriter(fWriter);
                    bWriter.write(statisticsDocumentsAsXMLStrings.get(name));
                    bWriter.flush();
                    bWriter.close();
                }
            }
            System.out.println("Results written");
        } else {
            for (String name : schemasAsStrings.keySet()) {
                System.out.println(name + ":");
                System.out.println(schemasAsStrings.get(name));
                System.out.println();
            }
            if (statisticsDocumentsAsXMLStrings != null) {
                for (String name : statisticsDocumentsAsXMLStrings.keySet()) {
                    System.out.println(name + ":");
                    System.out.println(statisticsDocumentsAsXMLStrings.get(name));
                    System.out.println();
                }
            }
        }
    } catch (XSDInferencerException e) {
        System.err.println();
        System.err.println("Error at inference proccess: " + e.getMessage());
        e.printStackTrace();
        System.exit(1);
    }
}
From source file:de.tu_berlin.dima.oligos.Oligos.java
public static void main(String[] args) throws TypeNotSupportedException {
    BasicConfigurator.configure();
    // TODO create cmdline option for setting logger level
    Logger.getRootLogger().setLevel(Level.INFO);
    CommandLineInterface cli = new CommandLineInterface(args);
    try {
        // TODO hard exit if the parsing fails!
        // better catch exceptions and log them
        if (!cli.parse()) {
            System.exit(2);
        }
        Properties props = new Properties();
        props.setProperty("user", cli.getUsername());
        props.setProperty("password", cli.getPassword());
        Connection connection = DriverManager.getConnection(cli.getConnectionString(), props);
        JdbcConnector jdbcConnector = new JdbcConnector(connection);
        MetaConnector metaConnector = null;
        Driver dbDriver = cli.dbDriver;
        switch (dbDriver.driverName) {
        case db2:
            LOGGER.trace("metaConnector = Db2MetaConnector");
            metaConnector = new Db2MetaConnector(jdbcConnector);
            break;
        case oracle:
            metaConnector = new OracleMetaConnector(jdbcConnector);
            break;
        default:
            LOGGER.error("Unknown database driver. Supported drivers are: " + DriverName.values());
        }
        // validating schema
        LOGGER.info("Validating input schema ...");
        SparseSchema sparseSchema = cli.getInputSchema();
        LOGGER.trace("User specified schema " + sparseSchema);
        DenseSchema inputSchema = DbUtils.populateSchema(sparseSchema, jdbcConnector, metaConnector);
        LOGGER.trace("Populated and validated schema " + inputSchema);
        // obtaining type information / column meta data
        LOGGER.info("Retrieving column meta data ...");
        Map<ColumnId, TypeInfo> columnTypes = Maps.newLinkedHashMap();
        for (ColumnId columnId : inputSchema) {
            TypeInfo type = metaConnector.getColumnType(columnId);
            columnTypes.put(columnId, type);
        }
        // creating connectors and profilers
        LOGGER.info("Establashing database connection ...");
        SchemaConnector schemaConnector = null;
        TableConnector tableConnector = null;
        switch (dbDriver.driverName) {
        case db2:
            schemaConnector = new Db2SchemaConnector(jdbcConnector);
            tableConnector = new Db2TableConnector(jdbcConnector);
            break;
        case oracle:
            schemaConnector = new OracleSchemaConnector(jdbcConnector);
            tableConnector = new OracleTableConnector(jdbcConnector);
        }
        Set<SchemaProfiler> profilers = Sets.newLinkedHashSet();
        for (String schema : inputSchema.schemas()) {
            SchemaProfiler schemaProfiler = new SchemaProfiler(schema, schemaConnector);
            profilers.add(schemaProfiler);
            for (String table : inputSchema.tablesIn(schema)) {
                TableProfiler tableProfiler = new TableProfiler(schema, table, tableConnector);
                schemaProfiler.add(tableProfiler);
                for (String column : inputSchema.columnsIn(schema, table)) {
                    ColumnId columnId = new ColumnId(schema, table, column);
                    TypeInfo type = columnTypes.get(columnId);
                    ColumnProfiler<?> columnProfiler = null;
                    switch (dbDriver.driverName) {
                    case db2:
                        columnProfiler = getProfiler(schema, table, column, type, jdbcConnector, metaConnector);
                        break;
                    case oracle:
                        columnProfiler = getProfilerOracle(schema, table, column, type, jdbcConnector, metaConnector);
                    }
                    tableProfiler.addColumnProfiler(columnProfiler);
                }
            }
        }
        // profiling statistical data
        LOGGER.info("Profiling schema ...");
        Set<Schema> profiledSchemas = Sets.newLinkedHashSet();
        for (SchemaProfiler schemaProfiler : profilers) {
            Schema profiledSchema = schemaProfiler.profile();
            profiledSchemas.add(profiledSchema);
        }
        LOGGER.info("Generating generator specification ...");
        File outputDir = cli.getOutputDirectory();
        String generatorName = cli.getGeneratorName();
        LOGGER.info("Writing generator specification ...");
        for (Schema schema : profiledSchemas) {
            MyriadWriter writer = new MyriadWriter(schema, outputDir, generatorName);
            writer.write();
        }
        LOGGER.info("Closing database connection ...");
        connection.close();
    } catch (SQLException e) {
        LOGGER.error(e.getLocalizedMessage());
        LOGGER.debug(ExceptionUtils.getStackTrace(e));
    } catch (IOException e) {
        LOGGER.error(e.getLocalizedMessage());
        LOGGER.debug(ExceptionUtils.getStackTrace(e));
    } catch (ParseException e) {
        LOGGER.error(e.getMessage());
        cli.printHelpMessage();
    }
}
From source file:com.buddycloud.channeldirectory.cli.Main.java
@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    JsonElement rootElement = new JsonParser().parse(new FileReader(QUERIES_FILE));
    JsonArray rootArray = rootElement.getAsJsonArray();
    Map<String, Query> queries = new HashMap<String, Query>();
    for (int i = 0; i < rootArray.size(); i++) {
        JsonObject queryElement = rootArray.get(i).getAsJsonObject();
        String queryName = queryElement.get("name").getAsString();
        String type = queryElement.get("type").getAsString();
        Query query = null;
        if (type.equals("solr")) {
            query = new QueryToSolr(queryElement.get("agg").getAsString(),
                    queryElement.get("core").getAsString(), queryElement.get("q").getAsString());
        } else if (type.equals("dbms")) {
            query = new QueryToDBMS(queryElement.get("q").getAsString());
        }
        queries.put(queryName, query);
    }
    LinkedList<String> queriesNames = new LinkedList<String>(queries.keySet());
    Collections.sort(queriesNames);
    Options options = new Options();
    options.addOption(OptionBuilder.isRequired(true).withLongOpt("query").hasArg(true)
            .withDescription("The name of the query. Possible queries are: " + queriesNames).create('q'));
    options.addOption(OptionBuilder.isRequired(false).withLongOpt("args").hasArg(true)
            .withDescription("Arguments for the query").create('a'));
    options.addOption(new Option("?", "help", false, "Print this message"));
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        printHelpAndExit(options);
    }
    if (cmd.hasOption("help")) {
        printHelpAndExit(options);
    }
    String queryName = cmd.getOptionValue("q");
    String argsCmd = cmd.getOptionValue("a");
    Properties configuration = ConfigurationUtils.loadConfiguration();
    Query query = queries.get(queryName);
    if (query == null) {
        printHelpAndExit(options);
    }
    System.out.println(query.exec(argsCmd, configuration));
}
From source file:de.citec.csra.elancsv.parser.SimpleParser.java
public static void main(String[] args) throws IOException, ParseException {
    Options opts = new Options();
    opts.addOption("file", true, "Tab-separated ELAN export file to load.");
    opts.addOption("tier", true, "Tier to analyze. Optional: Append ::num to interpret annotations numerically.");
    opts.addOption("format", true,
            "How to read information from the file name. %V -> participant, %A -> annoatator, %C -> condition, e.g. \"%V - %A\"");
    opts.addOption("help", false, "Print this help and exit");
    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(opts, args);
    if (cmd.hasOption("help")) {
        helpExit(opts, "where OPTION includes:");
    }
    String infile = cmd.getOptionValue("file");
    if (infile == null) {
        helpExit(opts, "Error: no file given.");
    }
    String format = cmd.getOptionValue("format");
    if (format == null) {
        helpExit(opts, "Error: no format given.");
    }
    String tier = cmd.getOptionValue("tier");
    if (tier == null) {
        helpExit(opts, "Error: no tier given.");
    }
    // TODO count values in annotations (e.g. search all robot occurrences)
    String[] tn = tier.split("::");
    boolean numeric = false;
    if (tn.length == 2 && tn[1].equals("num")) {
        numeric = true;
        tier = tn[0];
    }
    format = "^" + format + "$";
    format = format.replaceFirst("%V", "(?<V>.*?)");
    format = format.replaceFirst("%A", "(?<A>.*?)");
    format = format.replaceFirst("%C", "(?<C>.*?)");
    Pattern pa = Pattern.compile(format);
    Map<String, Participant> participants = new HashMap<>();
    BufferedReader br = new BufferedReader(new FileReader(infile));
    String line;
    int lineno = 0;
    while ((line = br.readLine()) != null) {
        String[] parts = line.split("\t");
        lineno++;
        if (parts.length < 5) {
            System.err.println("WARNING: line '" + lineno + "' too short '" + line + "'");
            continue;
        }
        Annotation a = new Annotation(Long.valueOf(parts[ElanFormat.START.field]),
                Long.valueOf(parts[ElanFormat.STOP.field]), Long.valueOf(parts[ElanFormat.DURATION.field]),
                parts[ElanFormat.VALUE.field]);
        String tname = parts[ElanFormat.TIER.field];
        String file = parts[ElanFormat.FILE.field].replaceAll(".eaf", "");
        Matcher m = pa.matcher(file);
        String vp = file;
        String condition = "?";
        String annotator = "?";
        String participantID = vp;
        if (m.find()) {
            vp = m.group("V");
            if (format.indexOf("<A>") > 0) {
                annotator = m.group("A");
            }
            if (format.indexOf("<C>") > 0) {
                condition = m.group("C");
            }
        }
        participantID = vp + ";" + annotator;
        if (!participants.containsKey(participantID)) {
            participants.put(participantID, new Participant(vp, condition, annotator));
        }
        Participant p = participants.get(participantID);
        if (!p.tiers.containsKey(tname)) {
            p.tiers.put(tname, new Tier(tname));
        }
        p.tiers.get(tname).annotations.add(a);
    }
    Map<String, Map<String, Number>> values = new HashMap<>();
    Set<String> rownames = new HashSet<>();
    String allCountKey = "c: all values";
    String allDurationKey = "d: all values";
    String allMeanKey = "m: all values";
    for (Map.Entry<String, Participant> e : participants.entrySet()) {
        // System.out.println(e);
        Tier t = e.getValue().tiers.get(tier);
        String participantID = e.getKey();
        if (!values.containsKey(participantID)) {
            values.put(participantID, new HashMap<String, Number>());
        }
        Map<String, Number> row = values.get(participantID); // participant id
        if (t != null) {
            row.put(allCountKey, 0l);
            row.put(allDurationKey, 0l);
            row.put(allMeanKey, 0l);
            for (Annotation a : t.annotations) {
                long countAll = (long) row.get(allCountKey) + 1;
                long durationAll = (long) row.get(allDurationKey) + a.duration;
                long meanAll = durationAll / countAll;
                row.put(allCountKey, countAll);
                row.put(allDurationKey, durationAll);
                row.put(allMeanKey, meanAll);
                if (!numeric) {
                    String countKey = "c: " + a.value;
                    String durationKey = "d: " + a.value;
                    String meanKey = "m: " + a.value;
                    if (!row.containsKey(countKey)) {
                        row.put(countKey, 0l);
                    }
                    if (!row.containsKey(durationKey)) {
                        row.put(durationKey, 0l);
                    }
                    if (!row.containsKey(meanKey)) {
                        row.put(meanKey, 0d);
                    }
                    long count = (long) row.get(countKey) + 1;
                    long duration = (long) row.get(durationKey) + a.duration;
                    double mean = duration * 1.0 / count;
                    row.put(countKey, count);
                    row.put(durationKey, duration);
                    row.put(meanKey, mean);
                    rownames.add(countKey);
                    rownames.add(durationKey);
                    rownames.add(meanKey);
                } else {
                    String countKey = "c: " + t.name;
                    String sumKey = "s: " + t.name;
                    String meanKey = "m: " + t.name;
                    if (!row.containsKey(countKey)) {
                        row.put(countKey, 0l);
                    }
                    if (!row.containsKey(sumKey)) {
                        row.put(sumKey, 0d);
                    }
                    if (!row.containsKey(meanKey)) {
                        row.put(meanKey, 0d);
                    }
                    double d = 0;
                    try {
                        d = Double.valueOf(a.value);
                    } catch (NumberFormatException ex) {
                    }
                    long count = (long) row.get(countKey) + 1;
                    double sum = (double) row.get(sumKey) + d;
                    double mean = sum / count;
                    row.put(countKey, count);
                    row.put(sumKey, sum);
                    row.put(meanKey, mean);
                    rownames.add(countKey);
                    rownames.add(sumKey);
                    rownames.add(meanKey);
                }
            }
        }
    }
    ArrayList<String> list = new ArrayList(rownames);
    Collections.sort(list);
    StringBuilder header = new StringBuilder("ID;Annotator;");
    header.append(allCountKey);
    header.append(";");
    header.append(allDurationKey);
    header.append(";");
    header.append(allMeanKey);
    header.append(";");
    for (String l : list) {
        header.append(l);
        header.append(";");
    }
    System.out.println(header);
    for (Map.Entry<String, Map<String, Number>> e : values.entrySet()) {
        StringBuilder row = new StringBuilder(e.getKey());
        row.append(";");
        if (e.getValue().containsKey(allCountKey)) {
            row.append(e.getValue().get(allCountKey));
        } else {
            row.append("0");
        }
        row.append(";");
        if (e.getValue().containsKey(allDurationKey)) {
            row.append(e.getValue().get(allDurationKey));
        } else {
            row.append("0");
        }
        row.append(";");
        if (e.getValue().containsKey(allMeanKey)) {
            row.append(e.getValue().get(allMeanKey));
        } else {
            row.append("0");
        }
        row.append(";");
        for (String l : list) {
            if (e.getValue().containsKey(l)) {
                row.append(e.getValue().get(l));
            } else {
                row.append("0");
            }
            row.append(";");
        }
        System.out.println(row);
    }
}
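The example above repeatedly pairs containsKey with get and put to initialize and then update its counters. On Java 8 and later the same read-modify-write step can be written more compactly with Map.getOrDefault or Map.merge; the short sketch below (the key and values are illustrative, not taken from the example) shows the equivalent update.

import java.util.HashMap;
import java.util.Map;

public class CountWithGetOrDefault {

    public static void main(String[] args) {
        Map<String, Long> counts = new HashMap<String, Long>();
        String key = "c: some value";

        // Equivalent to: if (!counts.containsKey(key)) counts.put(key, 0L); followed by get and put
        counts.put(key, counts.getOrDefault(key, 0L) + 1);

        // Or in a single call, letting the map handle the absent-key case
        counts.merge(key, 1L, Long::sum);

        System.out.println(counts.get(key)); // 2
    }
}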
From source file:eu.annocultor.converters.geonames.GeonamesDumpToRdf.java
public static void main(String[] args) throws Exception {
    File root = new File("input_source");
    // load country-continent match
    countryToContinent
            .load((new GeonamesDumpToRdf()).getClass().getResourceAsStream("/country-to-continent.properties"));
    // creating files
    Map<String, BufferedWriter> files = new HashMap<String, BufferedWriter>();
    Map<String, Boolean> started = new HashMap<String, Boolean>();
    for (Object string : countryToContinent.keySet()) {
        String continent = countryToContinent.getProperty(string.toString());
        File dir = new File(root, continent);
        if (!dir.exists()) {
            dir.mkdir();
        }
        files.put(string.toString(), new BufferedWriter(new OutputStreamWriter(
                new FileOutputStream(new File(root, continent + "/" + string + ".rdf")), "UTF-8")));
        System.out.println(continent + "/" + string + ".rdf");
        started.put(string.toString(), false);
    }
    System.out.println(started);
    Pattern countryPattern = Pattern
            .compile("<inCountry rdf\\:resource\\=\"http\\://www\\.geonames\\.org/countries/\\#(\\w\\w)\"/>");
    long counter = 0;
    LineIterator it = FileUtils.lineIterator(new File(root, "all-geonames-rdf.txt"), "UTF-8");
    try {
        while (it.hasNext()) {
            String text = it.nextLine();
            if (text.startsWith("http://sws.geonames"))
                continue;
            // progress
            counter++;
            if (counter % 100000 == 0) {
                System.out.print("*");
            }
            // System.out.println(counter);
            // get country
            String country = null;
            Matcher matcher = countryPattern.matcher(text);
            if (matcher.find()) {
                country = matcher.group(1);
            }
            // System.out.println(country);
            if (country == null)
                country = "null";
            text = text.replace("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?><rdf:RDF",
                    "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?><rdf:RDF");
            if (started.get(country) == null)
                throw new Exception("Unknow country " + country);
            if (started.get(country).booleanValue()) {
                // remove RDF opening
                text = text.substring(text.indexOf("<rdf:RDF "));
                text = text.substring(text.indexOf(">") + 1);
            }
            // remove RDF ending
            text = text.substring(0, text.indexOf("</rdf:RDF>"));
            files.get(country).append(text + "\n");
            if (!started.get(country).booleanValue()) {
                // System.out.println("Started with country " + country);
            }
            started.put(country, true);
        }
    } finally {
        LineIterator.closeQuietly(it);
    }
    for (Object string : countryToContinent.keySet()) {
        boolean hasStarted = started.get(string.toString()).booleanValue();
        if (hasStarted) {
            BufferedWriter bf = files.get(string.toString());
            bf.append("</rdf:RDF>");
            bf.flush();
            bf.close();
        }
    }
    return;
}
From source file:com.github.fritaly.graphml4j.samples.GradleDependenciesWithGroups.java
public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println(String.format("%s <output-file>", GradleDependenciesWithGroups.class.getSimpleName()));
        System.exit(1);
    }
    final File file = new File(args[0]);
    System.out.println("Writing GraphML file to " + file.getAbsolutePath() + " ...");
    FileWriter fileWriter = null;
    GraphMLWriter graphWriter = null;
    Reader reader = null;
    LineNumberReader lineReader = null;
    try {
        fileWriter = new FileWriter(file);
        graphWriter = new GraphMLWriter(fileWriter);
        // Customize the rendering of nodes
        final NodeStyle nodeStyle = graphWriter.getNodeStyle();
        nodeStyle.setWidth(250.0f);
        nodeStyle.setHeight(50.0f);
        graphWriter.setNodeStyle(nodeStyle);
        // The dependency graph has been generated by Gradle with the
        // command "gradle dependencies". The output of this command has
        // been saved to a text file which will be parsed to rebuild the
        // dependency graph
        reader = new InputStreamReader(
                GradleDependenciesWithGroups.class.getResourceAsStream("gradle-dependencies.txt"));
        lineReader = new LineNumberReader(reader);
        String line = null;
        // Stack containing the artifacts per depth inside the dependency
        // graph (the topmost dependency is the first one in the stack)
        final Stack<Artifact> stack = new Stack<Artifact>();
        final Map<String, Set<Artifact>> artifactsByGroup = new HashMap<String, Set<Artifact>>();
        // List of parent/child relationships between artifacts
        final List<Relationship> relationships = new ArrayList<Relationship>();
        while ((line = lineReader.readLine()) != null) {
            // Determine the depth of the current dependency inside the
            // graph. The depth can be inferred from the indentation used by
            // Gradle. Each level of depth adds 5 more characters of
            // indentation
            final int initialLength = line.length();
            // Remove the strings used by Gradle to indent dependencies
            // (each marker is 5 characters wide; the whitespace-only markers
            // were collapsed in the flattened listing and are restored here)
            line = StringUtils.replace(line, "+--- ", "");
            line = StringUtils.replace(line, "|    ", "");
            line = StringUtils.replace(line, "\\--- ", "");
            line = StringUtils.replace(line, "     ", "");
            // The depth can easily be inferred now
            final int depth = (initialLength - line.length()) / 5;
            // Remove unnecessary artifacts
            while (depth <= stack.size()) {
                stack.pop();
            }
            // Create an artifact from the dependency (group, artifact,
            // version) tuple
            final Artifact artifact = createArtifact(line);
            stack.push(artifact);
            if (stack.size() > 1) {
                // Store the artifact and its parent
                relationships.add(new Relationship(stack.get(stack.size() - 2), artifact));
            }
            if (!artifactsByGroup.containsKey(artifact.group)) {
                artifactsByGroup.put(artifact.group, new HashSet<Artifact>());
            }
            artifactsByGroup.get(artifact.group).add(artifact);
        }
        // Open the graph
        graphWriter.graph();
        final Map<Artifact, String> nodeIdsByArtifact = new HashMap<Artifact, String>();
        // Loop over the groups and generate the associated nodes
        for (String group : artifactsByGroup.keySet()) {
            graphWriter.group(group, true);
            for (Artifact artifact : artifactsByGroup.get(group)) {
                final String nodeId = graphWriter.node(artifact.getLabel());
                nodeIdsByArtifact.put(artifact, nodeId);
            }
            graphWriter.closeGroup();
        }
        // Generate the edges
        for (Relationship relationship : relationships) {
            final String parentId = nodeIdsByArtifact.get(relationship.parent);
            final String childId = nodeIdsByArtifact.get(relationship.child);
            graphWriter.edge(parentId, childId);
        }
        // Close the graph
        graphWriter.closeGraph();
        System.out.println("Done");
    } finally {
        // Calling GraphMLWriter.close() is necessary to dispose the underlying resources
        graphWriter.close();
        fileWriter.close();
        lineReader.close();
        reader.close();
    }
}
From source file:com.mmounirou.spotirss.SpotiRss.java
/**
 * @param args
 * @throws IOException
 * @throws ClassNotFoundException
 * @throws IllegalAccessException
 * @throws InstantiationException
 * @throws SpotifyClientException
 * @throws ChartRssException
 * @throws SpotifyException
 */
public static void main(String[] args) throws IOException, InstantiationException, IllegalAccessException,
        ClassNotFoundException, SpotifyClientException {
    if (args.length == 0) {
        System.err.println("usage : java -jar spotiboard.jar <charts-folder>");
        return;
    }
    Properties connProperties = new Properties();
    InputStream inStream = SpotiRss.class.getResourceAsStream("/spotify-server.properties");
    try {
        connProperties.load(inStream);
    } finally {
        IOUtils.closeQuietly(inStream);
    }
    String host = connProperties.getProperty("host");
    int port = Integer.parseInt(connProperties.getProperty("port"));
    String user = connProperties.getProperty("user");
    final SpotifyClient spotifyClient = new SpotifyClient(host, port, user);
    final Map<String, Playlist> playlistsByTitle = getPlaylistsByTitle(spotifyClient);
    final File outputDir = new File(args[0]);
    outputDir.mkdirs();
    TrackCache cache = new TrackCache();
    try {
        for (String strProvider : PROVIDERS) {
            String providerClassName = EntryToTrackConverter.class.getPackage().getName() + "."
                    + StringUtils.capitalize(strProvider);
            final EntryToTrackConverter converter = (EntryToTrackConverter) SpotiRss.class.getClassLoader()
                    .loadClass(providerClassName).newInstance();
            Iterable<String> chartsRss = getCharts(strProvider);
            final File resultDir = new File(outputDir, strProvider);
            resultDir.mkdir();
            final SpotifyHrefQuery hrefQuery = new SpotifyHrefQuery(cache);
            Iterable<String> results = FluentIterable.from(chartsRss).transform(new Function<String, String>() {
                @Override
                @Nullable
                public String apply(@Nullable String chartRss) {
                    try {
                        long begin = System.currentTimeMillis();
                        ChartRss bilboardChartRss = ChartRss.getInstance(chartRss, converter);
                        Map<Track, String> trackHrefs = hrefQuery.getTrackHrefs(bilboardChartRss.getSongs());
                        String strTitle = bilboardChartRss.getTitle();
                        File resultFile = new File(resultDir, strTitle);
                        List<String> lines = Lists.newLinkedList(FluentIterable.from(trackHrefs.keySet())
                                .transform(Functions.toStringFunction()));
                        lines.addAll(trackHrefs.values());
                        FileUtils.writeLines(resultFile, Charsets.UTF_8.displayName(), lines);
                        Playlist playlist = playlistsByTitle.get(strTitle);
                        if (playlist != null) {
                            playlist.getTracks().clear();
                            playlist.getTracks().addAll(trackHrefs.values());
                            spotifyClient.patch(playlist);
                            LOGGER.info(String.format("%s chart exported patched", strTitle));
                        }
                        LOGGER.info(String.format("%s chart exported in %s in %d s", strTitle,
                                resultFile.getAbsolutePath(),
                                (int) TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - begin)));
                    } catch (Exception e) {
                        LOGGER.error(String.format("fail to export %s charts", chartRss), e);
                    }
                    return "";
                }
            });
            // consume iterables
            Iterables.size(results);
        }
    } finally {
        cache.close();
    }
}
From source file:com.amazonaws.services.kinesis.leases.impl.LeaseCoordinatorExerciser.java
public static void main(String[] args) throws InterruptedException, DependencyException, InvalidStateException,
        ProvisionedThroughputException, IOException {
    int numCoordinators = 9;
    int numLeases = 73;
    int leaseDurationMillis = 10000;
    int epsilonMillis = 100;
    AWSCredentialsProvider creds = new DefaultAWSCredentialsProviderChain();
    AmazonDynamoDBClient ddb = new AmazonDynamoDBClient(creds);
    ILeaseManager<KinesisClientLease> leaseManager = new KinesisClientLeaseManager("nagl_ShardProgress", ddb);
    if (leaseManager.createLeaseTableIfNotExists(10L, 50L)) {
        LOG.info("Waiting for newly created lease table");
        if (!leaseManager.waitUntilLeaseTableExists(10, 300)) {
            LOG.error("Table was not created in time");
            return;
        }
    }
    CWMetricsFactory metricsFactory = new CWMetricsFactory(creds, "testNamespace", 30 * 1000, 1000);
    final List<LeaseCoordinator<KinesisClientLease>> coordinators = new ArrayList<LeaseCoordinator<KinesisClientLease>>();
    for (int i = 0; i < numCoordinators; i++) {
        String workerIdentifier = "worker-" + Integer.toString(i);
        LeaseCoordinator<KinesisClientLease> coord = new LeaseCoordinator<KinesisClientLease>(leaseManager,
                workerIdentifier, leaseDurationMillis, epsilonMillis, metricsFactory);
        coordinators.add(coord);
    }
    leaseManager.deleteAll();
    for (int i = 0; i < numLeases; i++) {
        KinesisClientLease lease = new KinesisClientLease();
        lease.setLeaseKey(Integer.toString(i));
        lease.setCheckpoint(new ExtendedSequenceNumber("checkpoint"));
        leaseManager.createLeaseIfNotExists(lease);
    }
    final JFrame frame = new JFrame("Test Visualizer");
    frame.setPreferredSize(new Dimension(800, 600));
    final JPanel panel = new JPanel(new GridLayout(coordinators.size() + 1, 0));
    final JLabel ticker = new JLabel("tick");
    panel.add(ticker);
    frame.getContentPane().add(panel);
    final Map<String, JLabel> labels = new HashMap<String, JLabel>();
    for (final LeaseCoordinator<KinesisClientLease> coord : coordinators) {
        JPanel coordPanel = new JPanel();
        coordPanel.setLayout(new BoxLayout(coordPanel, BoxLayout.X_AXIS));
        final Button button = new Button("Stop " + coord.getWorkerIdentifier());
        button.setMaximumSize(new Dimension(200, 50));
        button.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent arg0) {
                if (coord.isRunning()) {
                    coord.stop();
                    button.setLabel("Start " + coord.getWorkerIdentifier());
                } else {
                    try {
                        coord.start();
                    } catch (LeasingException e) {
                        LOG.error(e);
                    }
                    button.setLabel("Stop " + coord.getWorkerIdentifier());
                }
            }
        });
        coordPanel.add(button);
        JLabel label = new JLabel();
        coordPanel.add(label);
        labels.put(coord.getWorkerIdentifier(), label);
        panel.add(coordPanel);
    }
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    new Thread() {
        // Key is lease key, value is green-ness as a value from 0 to 255.
        // Great variable name, huh?
        private Map<String, Integer> greenNesses = new HashMap<String, Integer>();

        // Key is lease key, value is last owning worker
        private Map<String, String> lastOwners = new HashMap<String, String>();

        @Override
        public void run() {
            while (true) {
                for (LeaseCoordinator<KinesisClientLease> coord : coordinators) {
                    String workerIdentifier = coord.getWorkerIdentifier();
                    JLabel label = labels.get(workerIdentifier);
                    List<KinesisClientLease> asgn = new ArrayList<KinesisClientLease>(coord.getAssignments());
                    Collections.sort(asgn, new Comparator<KinesisClientLease>() {
                        @Override
                        public int compare(KinesisClientLease arg0, KinesisClientLease arg1) {
                            return arg0.getLeaseKey().compareTo(arg1.getLeaseKey());
                        }
                    });
                    StringBuilder builder = new StringBuilder();
                    builder.append("<html>");
                    builder.append(workerIdentifier).append(":").append(asgn.size()).append(" ");
                    for (KinesisClientLease lease : asgn) {
                        String leaseKey = lease.getLeaseKey();
                        String lastOwner = lastOwners.get(leaseKey);
                        // Color things green when they switch owners, decay the green-ness over time.
                        Integer greenNess = greenNesses.get(leaseKey);
                        if (greenNess == null || lastOwner == null || !lastOwner.equals(lease.getLeaseOwner())) {
                            greenNess = 200;
                        } else {
                            greenNess = Math.max(0, greenNess - 20);
                        }
                        greenNesses.put(leaseKey, greenNess);
                        lastOwners.put(leaseKey, lease.getLeaseOwner());
                        builder.append(String.format("<font color=\"%s\">%03d</font>",
                                String.format("#00%02x00", greenNess), Integer.parseInt(leaseKey))).append(" ");
                    }
                    builder.append("</html>");
                    label.setText(builder.toString());
                    label.revalidate();
                    label.repaint();
                }
                if (ticker.getText().equals("tick")) {
                    ticker.setText("tock");
                } else {
                    ticker.setText("tick");
                }
                try {
                    Thread.sleep(200);
                } catch (InterruptedException e) {
                }
            }
        }
    }.start();
    frame.pack();
    frame.setVisible(true);
    for (LeaseCoordinator<KinesisClientLease> coord : coordinators) {
        coord.start();
    }
}
From source file:com.github.fritaly.svngraph.SvnGraph.java
public static void main(String[] args) throws Exception {
    if (args.length != 2) {
        System.out.println(String.format("%s <input-file> <output-file>", SvnGraph.class.getSimpleName()));
        System.exit(1);
    }
    final File input = new File(args[0]);
    if (!input.exists()) {
        throw new IllegalArgumentException(
                String.format("The given file '%s' doesn't exist", input.getAbsolutePath()));
    }
    final File output = new File(args[1]);
    final Document document = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(input);
    final History history = new History(document);
    final Set<String> rootPaths = history.getRootPaths();
    System.out.println(rootPaths);
    for (String path : rootPaths) {
        System.out.println(path);
        System.out.println(history.getHistory(path).getRevisions());
        System.out.println();
    }
    int count = 0;
    FileWriter fileWriter = null;
    GraphMLWriter graphWriter = null;
    try {
        fileWriter = new FileWriter(output);
        graphWriter = new GraphMLWriter(fileWriter);
        final NodeStyle tagStyle = graphWriter.getNodeStyle();
        tagStyle.setFillColor(Color.WHITE);
        graphWriter.graph();
        // map associating node labels to their corresponding node id in the graph
        final Map<String, String> nodeIdsPerLabel = new TreeMap<>();
        // the node style associated to each branch
        final Map<String, NodeStyle> nodeStyles = new TreeMap<>();
        for (Revision revision : history.getSignificantRevisions()) {
            System.out.println(revision.getNumber() + " - " + revision.getMessage());
            // TODO Render also the deletion of branches
            // there should be only 1 significant update per revision (the one with action ADD)
            for (Update update : revision.getSignificantUpdates()) {
                if (update.isCopy()) {
                    // a merge is also considered a copy
                    final RevisionPath source = update.getCopySource();
                    System.out.println(String.format(" > %s %s from %s@%d", update.getAction(),
                            update.getPath(), source.getPath(), source.getRevision()));
                    final String sourceRoot = Utils.getRootName(source.getPath());
                    if (sourceRoot == null) {
                        // skip the revisions whose associated root is
                        // null (happens whether a branch was created
                        // outside the 'branches' directory for
                        // instance)
                        System.err.println(String.format("Skipped revision %d because of a null root",
                                source.getRevision()));
                        continue;
                    }
                    final String sourceLabel = computeNodeLabel(sourceRoot, source.getRevision());
                    // create a node for the source (path, revision)
                    final String sourceId;
                    if (nodeIdsPerLabel.containsKey(sourceLabel)) {
                        // retrieve the id of the existing node
                        sourceId = nodeIdsPerLabel.get(sourceLabel);
                    } else {
                        // create the new node
                        if (Utils.isTagPath(source.getPath())) {
                            graphWriter.setNodeStyle(tagStyle);
                        } else {
                            if (!nodeStyles.containsKey(sourceRoot)) {
                                final NodeStyle style = new NodeStyle();
                                style.setFillColor(randomColor());
                                nodeStyles.put(sourceRoot, style);
                            }
                            graphWriter.setNodeStyle(nodeStyles.get(sourceRoot));
                        }
                        sourceId = graphWriter.node(sourceLabel);
                        nodeIdsPerLabel.put(sourceLabel, sourceId);
                    }
                    // and another for the newly created directory
                    final String targetRoot = Utils.getRootName(update.getPath());
                    if (targetRoot == null) {
                        System.err.println(String.format("Skipped revision %d because of a null root",
                                revision.getNumber()));
                        continue;
                    }
                    final String targetLabel = computeNodeLabel(targetRoot, revision.getNumber());
                    if (Utils.isTagPath(update.getPath())) {
                        graphWriter.setNodeStyle(tagStyle);
                    } else {
                        if (!nodeStyles.containsKey(targetRoot)) {
                            final NodeStyle style = new NodeStyle();
                            style.setFillColor(randomColor());
                            nodeStyles.put(targetRoot, style);
                        }
                        graphWriter.setNodeStyle(nodeStyles.get(targetRoot));
                    }
                    final String targetId;
                    if (nodeIdsPerLabel.containsKey(targetLabel)) {
                        // retrieve the id of the existing node
                        targetId = nodeIdsPerLabel.get(targetLabel);
                    } else {
                        // create the new node
                        if (Utils.isTagPath(update.getPath())) {
                            graphWriter.setNodeStyle(tagStyle);
                        } else {
                            if (!nodeStyles.containsKey(targetRoot)) {
                                final NodeStyle style = new NodeStyle();
                                style.setFillColor(randomColor());
                                nodeStyles.put(targetRoot, style);
                            }
                            graphWriter.setNodeStyle(nodeStyles.get(targetRoot));
                        }
                        targetId = graphWriter.node(targetLabel);
                        nodeIdsPerLabel.put(targetLabel, targetId);
                    }
                    // create an edge between the 2 nodes
                    graphWriter.edge(sourceId, targetId);
                } else {
                    System.out.println(String.format(" > %s %s", update.getAction(), update.getPath()));
                }
            }
            System.out.println();
            count++;
        }
        // Dispatch the revisions per corresponding branch
        final Map<String, Set<Long>> revisionsPerBranch = new TreeMap<>();
        for (String nodeLabel : nodeIdsPerLabel.keySet()) {
            if (nodeLabel.contains("@")) {
                final String branchName = StringUtils.substringBefore(nodeLabel, "@");
                final long revision = Long.parseLong(StringUtils.substringAfter(nodeLabel, "@"));
                if (!revisionsPerBranch.containsKey(branchName)) {
                    revisionsPerBranch.put(branchName, new TreeSet<Long>());
                }
                revisionsPerBranch.get(branchName).add(revision);
            } else {
                throw new IllegalStateException(nodeLabel);
            }
        }
        // Recreate the missing edges between revisions from a same branch
        for (String branchName : revisionsPerBranch.keySet()) {
            final List<Long> branchRevisions = new ArrayList<>(revisionsPerBranch.get(branchName));
            for (int i = 0; i < branchRevisions.size() - 1; i++) {
                final String nodeLabel1 = String.format("%s@%d", branchName, branchRevisions.get(i));
                final String nodeLabel2 = String.format("%s@%d", branchName, branchRevisions.get(i + 1));
                graphWriter.edge(nodeIdsPerLabel.get(nodeLabel1), nodeIdsPerLabel.get(nodeLabel2));
            }
        }
        graphWriter.closeGraph();
        System.out.println(String.format("Found %d significant revisions", count));
    } finally {
        if (graphWriter != null) {
            graphWriter.close();
        }
        if (fileWriter != null) {
            fileWriter.close();
        }
    }
    System.out.println("Done");
}
From source file:net.dontdrinkandroot.lastfm.api.CheckImplementationStatus.java
public static void main(final String[] args) throws DocumentException, IOException {
    CheckImplementationStatus.xmlReader = new Parser();
    CheckImplementationStatus.saxReader = new SAXReader(CheckImplementationStatus.xmlReader);
    final String packagePrefix = "net.dontdrinkandroot.lastfm.api.model.";
    final Map<String, Map<String, URL>> packages = CheckImplementationStatus.parseOverview();
    final StringBuffer html = new StringBuffer();
    html.append("<html>\n");
    html.append("<head>\n");
    html.append("<title>Implementation Status</title>\n");
    html.append("</head>\n");
    html.append("<body>\n");
    html.append("<h1>Implementation Status</h1>\n");
    final StringBuffer wiki = new StringBuffer();
    int numImplemented = 0;
    int numTested = 0;
    int numMethods = 0;
    final List<String> packageList = new ArrayList<String>(packages.keySet());
    Collections.sort(packageList);
    for (final String pkg : packageList) {
        System.out.println("Parsing " + pkg);
        html.append("<h2>" + pkg + "</h2>\n");
        wiki.append("\n===== " + pkg + " =====\n\n");
        Class<?> modelClass = null;
        final String className = packagePrefix + pkg;
        try {
            modelClass = Class.forName(className);
            System.out.println("\tClass " + modelClass.getName() + " exists");
        } catch (final ClassNotFoundException e) {
            // e.printStackTrace();
            System.out.println("\t" + className + ": DOES NOT exist");
        }
        Class<?> testClass = null;
        final String testClassName = packagePrefix + pkg + "Test";
        try {
            testClass = Class.forName(testClassName);
            System.out.println("\tTestClass " + testClass.getName() + " exists");
        } catch (final ClassNotFoundException e) {
            // e.printStackTrace();
            System.out.println("\t" + testClassName + ": TestClass for DOES NOT exist");
        }
        final List<String> methods = new ArrayList<String>(packages.get(pkg).keySet());
        Collections.sort(methods);
        final Method[] classMethods = modelClass.getMethods();
        final Method[] testMethods = testClass.getMethods();
        html.append("<table>\n");
        html.append("<tr><th>Method</th><th>Implemented</th><th>Tested</th></tr>\n");
        wiki.append("^ Method ^ Implemented ^ Tested ^\n");
        numMethods += methods.size();
        for (final String method : methods) {
            System.out.println("\t\t parsing " + method);
            html.append("<tr>\n");
            html.append("<td>" + method + "</td>\n");
            wiki.append("| " + method + " ");
            boolean classMethodFound = false;
            for (final Method classMethod : classMethods) {
                if (classMethod.getName().equals(method)) {
                    classMethodFound = true;
                    break;
                }
            }
            if (classMethodFound) {
                System.out.println("\t\t\tMethod " + method + " found");
                html.append("<td style=\"background-color: green\">true</td>\n");
                wiki.append("| yes ");
                numImplemented++;
            } else {
                System.out.println("\t\t\t" + method + " NOT found");
                html.append("<td style=\"background-color: red\">false</td>\n");
                wiki.append("| **no** ");
            }
            boolean testMethodFound = false;
            final String testMethodName = "test" + StringUtils.capitalize(method);
            for (final Method testMethod : testMethods) {
                if (testMethod.getName().equals(testMethodName)) {
                    testMethodFound = true;
                    break;
                }
            }
            if (testMethodFound) {
                System.out.println("\t\t\tTestMethod " + method + " found");
                html.append("<td style=\"background-color: green\">true</td>\n");
                wiki.append("| yes |\n");
                numTested++;
            } else {
                System.out.println("\t\t\t" + testMethodName + " NOT found");
                html.append("<td style=\"background-color: red\">false</td>\n");
                wiki.append("| **no** |\n");
            }
            html.append("</tr>\n");
        }
        html.append("</table>\n");

        // for (String methodName : methods) {
        // URL url = pkg.getValue().get(methodName);
        // System.out.println("PARSING: " + pkg.getKey() + "." + methodName + ": " + url);
        // String html = loadIntoString(url);
        // String description = null;
        // Matcher descMatcher = descriptionPattern.matcher(html);
        // if (descMatcher.find()) {
        // description = descMatcher.group(1).trim();
        // }
        // boolean post = false;
        // Matcher postMatcher = postPattern.matcher(html);
        // if (postMatcher.find()) {
        // post = true;
        // }
        // Matcher paramsMatcher = paramsPattern.matcher(html);
        // List<String[]> params = new ArrayList<String[]>();
        // boolean authenticated = false;
        // if (paramsMatcher.find()) {
        // String paramsString = paramsMatcher.group(1);
        // Matcher paramMatcher = paramPattern.matcher(paramsString);
        // while (paramMatcher.find()) {
        // String[] param = new String[3];
        // param[0] = paramMatcher.group(1);
        // param[1] = paramMatcher.group(3);
        // param[2] = paramMatcher.group(5);
        // // System.out.println(paramMatcher.group(1) + "|" + paramMatcher.group(3) + "|" + paramMatcher.group(5));
        // if (param[0].equals("")) {
        // /* DO NOTHING */
        // } else if (param[0].equals("api_key")) {
        // /* DO NOTHING */
        // } else if (param[0].equals("api_sig")) {
        // authenticated = true;
        // } else {
        // params.add(param);
        // }
        // }
        // }
        // }
        // count++;
        // }
    }
    html.append("<hr />");
    html.append("<p>" + numImplemented + "/" + numMethods + " implemented (" + numImplemented * 100 / numMethods
            + "%)</p>");
    html.append("<p>" + numTested + "/" + numMethods + " tested (" + numTested * 100 / numMethods + "%)</p>");
    html.append("</body>\n");
    html.append("</html>\n");
    FileOutputStream out = new FileOutputStream(new File(FileUtils.getTempDirectory(), "apistatus.html"));
    IOUtils.write(html, out);
    IOUtils.closeQuietly(out);
    out = new FileOutputStream(new File(FileUtils.getTempDirectory(), "apistatus.wiki.txt"));
    IOUtils.write(wiki, out);
    IOUtils.closeQuietly(out);
}