List of usage examples for com.google.common.collect.Iterables.transform
@CheckReturnValue public static <F, T> Iterable<T> transform(final Iterable<F> fromIterable, final Function<? super F, ? extends T> function)
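Before the project-specific examples below, a minimal sketch of the call itself (the class name, list contents, and variable names here are illustrative and not taken from any of the projects): transform returns a lazily evaluated view, so the Function is applied only while the result is being iterated.

import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import java.util.List;

public class TransformSketch {
    public static void main(String[] args) {
        List<String> names = ImmutableList.of("alpha", "beta", "gamma");
        // The returned Iterable is a lazy view: apply() runs during iteration,
        // not when transform() is called.
        Iterable<Integer> lengths = Iterables.transform(names, new Function<String, Integer>() {
            @Override
            public Integer apply(String input) {
                return input.length();
            }
        });
        for (Integer length : lengths) {
            System.out.println(length); // prints 5, 4, 5
        }
    }
}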
From source file:kn.uni.gis.dataimport.FormatStrangeFlickrFormat.java
public static void main(String[] args) throws IOException {
    Iterable<String> readLines = filterNulls(concatLines(Files.readLines(new File(INPUT), Charsets.UTF_8)));
    // BufferedReader reader = Files
    //         .newReader(new File(INPUT), Charsets.UTF_8);
    // 1,20,12
    Files.write(Joiner.on("\n").skipNulls().join(Iterables.transform(readLines, new Function<String, String>() {
        @Override
        public String apply(String input) {
            // System.out.println(input);
            String[] split = input.split(";");
            if (equalss(split[0], "524", "567", "2284", "2720")) {
                return null;
            }
            assertNumbers(split);
            String asdf = Joiner.on("\t").join(split[0], split[19], split[20], "Z", "M", split[3], "");
            System.out.println(asdf);
            return asdf;
        }

        private void assertNumbers(String[] split) {
            if (!!!split[0].equals("Field1")) {
                Preconditions.checkArgument(Double.valueOf(split[19].replace(',', '.')) > 13,
                        split[19] + Arrays.toString(split));
                Preconditions.checkArgument(Double.valueOf(split[20].replace(',', '.')) > 52,
                        split[20] + Arrays.toString(split));
            }
        }
    })).replaceAll(",", "."), new File(OUTPUT), Charsets.UTF_8);
}
From source file:com.facebook.swift.generator.Main.java
public static void main(final String... args) throws Exception {
    URI workingDirectory = new File(System.getProperty("user.dir")).getCanonicalFile().toURI();

    SwiftGeneratorCommandLineConfig cliConfig = new SwiftGeneratorCommandLineConfig();
    JCommander jCommander = new JCommander(cliConfig, args);
    jCommander.setProgramName(SwiftGenerator.class.getSimpleName());

    if (cliConfig.inputFiles == null) {
        jCommander.usage();
        return;
    }

    SwiftGeneratorConfig.Builder configBuilder = SwiftGeneratorConfig.builder()
            .inputBase(workingDirectory)
            .outputFolder(cliConfig.outputDirectory)
            .overridePackage(cliConfig.overridePackage)
            .defaultPackage(cliConfig.defaultPackage)
            .generateIncludedCode(cliConfig.generateIncludedCode)
            .codeFlavor(cliConfig.generateBeans ? "java-regular" : "java-immutable");

    for (SwiftGeneratorTweak tweak : cliConfig.tweaks) {
        configBuilder.addTweak(tweak);
    }

    if (cliConfig.usePlainJavaNamespace) {
        configBuilder.addTweak(SwiftGeneratorTweak.USE_PLAIN_JAVA_NAMESPACE);
    }

    Iterable<URI> inputs = Iterables.transform(cliConfig.inputFiles, FILE_TO_URI_TRANSFORM);

    new SwiftGenerator(configBuilder.build()).parse(inputs);
}
From source file:Dictconvert_with_elements.java
public static void main(String args[]) throws Exception {
    String targetNameSpace = args[1];
    String targetNameSpaceVar = args[2];
    Map<String, String> namespaces = ImmutableMap.<String, String>of(targetNameSpace, targetNameSpaceVar);
    HashMap<String, String> keys = new HashMap<String, String>();

    DocumentBuilderFactory builderF = DocumentBuilderFactory.newInstance();
    builderF.setNamespaceAware(true);
    DocumentBuilder builder = builderF.newDocumentBuilder();
    Document xmlDictionary = builder.parse(args[0]);

    for (Map.Entry<String, String> e : namespaces.entrySet()) {
        System.out.println("XMLCh " + e.getValue() + "[] = {"
                + Joiner.on(",").join(Iterables.transform(Lists.charactersOf(e.getKey()),
                        new Function<Character, String>() {
                            public String apply(Character o) {
                                return "\'" + o + "\'";
                            }
                        }))
                + ",\'\\0\'};");
    }

    Map<String, String> localKeys = new HashMap<String, String>();

    for (Element e : Iterables.filter(getElements(xmlDictionary.getDocumentElement()), new Predicate<Element>() {
        public boolean apply(Element element) {
            return element.getAttribute("type").equals("localSet");
        }
    })) {
        String name = e.getLocalName();
        String keyName = "key_" + Joiner.on("").join(e.getAttribute("key").split("\\s"));
        localKeys.put(keyName, keyName + "_name");

        System.out.println("mxfKey " + keyName + " = {"
                + Joiner.on(",").join(Iterables.transform(Arrays.asList(e.getAttribute("key").split("\\s")),
                        new Function<String, String>() {
                            public String apply(String o) {
                                return "0x" + o;
                            }
                        }))
                + "};");
        System.out.println("XMLCh " + keyName + "_name[] = {"
                + Joiner.on(",").join(Iterables.transform(Lists.charactersOf(name),
                        new Function<Character, String>() {
                            public String apply(Character o) {
                                return "\'" + o + "\'";
                            }
                        }))
                + ",\'\\0\'};");

        //System.out.println("st434dict.insert(std::pair<mxfKey, st434info*>(");
        //System.out.println('\t' + keyName + ',');
        //System.out.println("\tnew st434info(/* " + name + " */ " + keyName + "_name, /* " + targetNameSpace + " */ " + namespaces.get(targetNameSpace) + ")");
        //System.out.println("));");

        for (Element ee : getElements(e)) {
            String elemKeyName = "key_" + Joiner.on("").join(ee.getAttribute("globalKey").split("\\s"));
            localKeys.put(elemKeyName, elemKeyName + "_name");

            System.out.println("mxfKey " + elemKeyName + " = {"
                    + Joiner.on(",").join(Iterables.transform(Arrays.asList(ee.getAttribute("globalKey").split("\\s")),
                            new Function<String, String>() {
                                public String apply(String o) {
                                    return "0x" + o;
                                }
                            }))
                    + "};");
            System.out.println("XMLCh " + elemKeyName + "_name[] = {"
                    + Joiner.on(",").join(Iterables.transform(Lists.charactersOf(ee.getLocalName()),
                            new Function<Character, String>() {
                                public String apply(Character o) {
                                    return "\'" + o + "\'";
                                }
                            }))
                    + ",\'\\0\'};");

            //System.out.println("st434dict.insert(std::pair<mxfKey, st434info*>(");
            //System.out.println('\t' + elemKeyName + ',');
            //System.out.println("\tnew st434info(/* " + ee.getLocalName() + " */ " + elemKeyName + "_name, /* " + targetNameSpace + " */ " + namespaces.get(targetNameSpace) + ")");
            //System.out.println("));");
        }
    }

    if (localKeys.size() > 0) {
        String arrayName = "arr_" + targetNameSpaceVar;
        System.out.println("const void* " + arrayName + "[][2] = {");
        System.out.println(Joiner.on(", \n").join(Iterables.transform(localKeys.entrySet(),
                new Function<Map.Entry<String, String>, String>() {
                    @Override
                    public String apply(java.util.Map.Entry<String, String> e) {
                        return "{ &" + e.getKey() + ", " + e.getValue() + " }";
                    }
                })));
        System.out.println("};");
        System.out.println("for (int i=0; i<" + localKeys.size() + ";i++) {");
        System.out.println("\tst434dict.insert(std::pair<const mxfKey, st434info*>(");
        System.out.println("\t*(const mxfKey*)" + arrayName + "[i][0], ");
        System.out.println("\tnew st434info((const XMLCh*)" + arrayName + "[i][1], " + targetNameSpaceVar + ")");
        System.out.println("));");
        System.out.println("}");
    }
}
From source file:org.apache.mahout.knn.Vectorize20NewsGroups.java
public static void main(String[] args) throws IOException {
    String weightingCode = args[0];
    boolean normalize = weightingCode.endsWith("c");

    legalHeaders = Sets.newHashSet();
    Iterables.addAll(legalHeaders,
            Iterables.transform(Splitter.on(",").trimResults().split(args[1]), new Function<String, String>() {
                @Override
                public String apply(String s) {
                    return s.toLowerCase();
                }
            }));

    includeQuotes = Boolean.parseBoolean(args[2]);

    CorpusWeighting cw = CorpusWeighting.parse(weightingCode);
    if (cw.needCorpusWeights()) {
        Multiset<String> wordFrequency = HashMultiset.create();
        Set<String> documents = Sets.newHashSet();
        for (String file : Arrays.asList(args).subList(4, args.length)) {
            recursivelyCount(documents, wordFrequency, new File(file));
        }
        cw.setCorpusCounts(wordFrequency, documents.size());
    }

    int dimension = Integer.parseInt(args[3]);

    Configuration conf = new Configuration();
    SequenceFile.Writer sf = SequenceFile.createWriter(FileSystem.getLocal(conf), conf, new Path("output"),
            Text.class, VectorWritable.class);
    PrintWriter csv = new PrintWriter("output.csv");
    for (String file : Arrays.asList(args).subList(4, args.length)) {
        recursivelyVectorize(csv, sf, new File(file), cw, normalize, dimension);
    }
    csv.close();
    sf.close();
}
From source file:org.apache.mahout.knn.tools.Vectorize20NewsGroups.java
public static void main(String[] args) throws IOException {
    String weightingCode = args[0];
    boolean normalize = weightingCode.endsWith("c");

    legalHeaders = Sets.newHashSet();
    Iterables.addAll(legalHeaders,
            Iterables.transform(Splitter.on(",").trimResults().split(args[1]), new Function<String, String>() {
                @Override
                public String apply(String s) {
                    return s.toLowerCase();
                }
            }));

    includeQuotes = Boolean.parseBoolean(args[2]);

    CorpusWeighting cw = CorpusWeighting.parse(weightingCode);
    if (cw.needCorpusWeights()) {
        Multiset<String> wordFrequency = HashMultiset.create();
        Set<String> documents = Sets.newHashSet();
        for (String file : Arrays.asList(args).subList(4, args.length)) {
            recursivelyCount(documents, wordFrequency, new File(file));
        }
        cw.setCorpusCounts(wordFrequency, documents.size());
    }

    int dimension = Integer.parseInt(args[3]);

    Configuration conf = new Configuration();
    SequenceFile.Writer sf = SequenceFile.createWriter(FileSystem.getLocal(conf), conf, new Path("output-file"),
            Text.class, VectorWritable.class);
    PrintWriter csv = new PrintWriter("output-file.csv");
    for (String file : Arrays.asList(args).subList(4, args.length)) {
        recursivelyVectorize(csv, sf, new File(file), cw, normalize, dimension);
    }
    csv.close();
    sf.close();
}
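Both Vectorize20NewsGroups variants above (like the other examples on this page) pass the Function as an anonymous inner class, which was required on Java 6/7. On Java 8 or later the same call can usually be written as a lambda, since Guava's Function has a single abstract method. A minimal sketch, assuming Java 8+ and the same comma-separated header list in args[1] as above:

// Equivalent lower-casing transform written as a lambda (illustrative only).
Iterable<String> lowercasedHeaders =
        Iterables.transform(Splitter.on(",").trimResults().split(args[1]), s -> s.toLowerCase());
Iterables.addAll(legalHeaders, lowercasedHeaders);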
From source file:co.cask.cdap.data.stream.StreamTailer.java
public static void main(String[] args) throws Exception {
    if (args.length < 1) {
        System.out.println(String.format("Usage: java %s [streamName]", StreamTailer.class.getName()));
        return;
    }

    String streamName = args[0];

    CConfiguration cConf = CConfiguration.create();
    Configuration hConf = new Configuration();

    Injector injector = Guice.createInjector(new ConfigModule(cConf, hConf),
            new DataFabricModules().getDistributedModules(), new DataSetsModules().getDistributedModules(),
            new LocationRuntimeModule().getDistributedModules(), new StreamAdminModules().getDistributedModules(),
            new NotificationFeedClientModule());

    StreamAdmin streamAdmin = injector.getInstance(StreamAdmin.class);

    //TODO: get namespace from commandline arguments
    Id.Stream streamId = Id.Stream.from(Constants.DEFAULT_NAMESPACE, streamName);
    StreamConfig streamConfig = streamAdmin.getConfig(streamId);
    Location streamLocation = streamConfig.getLocation();
    List<Location> eventFiles = Lists.newArrayList();

    for (Location partition : streamLocation.list()) {
        if (!partition.isDirectory()) {
            continue;
        }
        for (Location file : partition.list()) {
            if (StreamFileType.EVENT.isMatched(file.getName())) {
                eventFiles.add(file);
            }
        }
    }

    int generation = StreamUtils.getGeneration(streamConfig);

    MultiLiveStreamFileReader reader = new MultiLiveStreamFileReader(streamConfig,
            ImmutableList.copyOf(Iterables.transform(eventFiles, createOffsetConverter(generation))));
    List<StreamEvent> events = Lists.newArrayList();
    while (reader.read(events, 10, 100, TimeUnit.MILLISECONDS) >= 0) {
        for (StreamEvent event : events) {
            System.out.println(event.getTimestamp() + " " + Charsets.UTF_8.decode(event.getBody()));
        }
        events.clear();
    }

    reader.close();
}
From source file:edu.byu.nlp.data.app.DataExporter.java
public static void main(String[] args) throws IOException {
    args = new ArgumentParser(DataExporter.class).parseArgs(args).getPositionalArgs();

    RandomGenerator rnd = new MersenneTwister();
    Dataset dataset = readData(rnd);
    Iterable<String> it = Iterables.transform(dataset, new Instance2SVMLitePlus());
    if (args.length < 1) {
        Writers.writeLines(new PrintWriter(new BufferedOutputStream(System.out)), it);
    } else {
        Files2.writeLines(it, args[0]);
    }
}
From source file:cosmos.example.BuildingPermitsExample.java
public static void main(String[] args) throws Exception {
    BuildingPermitsExample example = new BuildingPermitsExample();
    new JCommander(example, args);

    File inputFile = new File(example.fileName);

    Preconditions.checkArgument(inputFile.exists() && inputFile.isFile() && inputFile.canRead(),
            "Expected " + example.fileName + " to be a readable file");

    String zookeepers;
    String instanceName;
    Connector connector;
    MiniAccumuloCluster mac = null;
    File macDir = null;

    // Use the MiniAccumuloCluster if requested
    if (example.useMiniAccumuloCluster) {
        macDir = Files.createTempDir();
        String password = "password";
        MiniAccumuloConfig config = new MiniAccumuloConfig(macDir, password);
        config.setNumTservers(1);

        mac = new MiniAccumuloCluster(config);
        mac.start();

        zookeepers = mac.getZooKeepers();
        instanceName = mac.getInstanceName();

        ZooKeeperInstance instance = new ZooKeeperInstance(instanceName, zookeepers);
        connector = instance.getConnector("root", new PasswordToken(password));
    } else {
        // Otherwise connect to a running instance
        zookeepers = example.zookeepers;
        instanceName = example.instanceName;

        ZooKeeperInstance instance = new ZooKeeperInstance(instanceName, zookeepers);
        connector = instance.getConnector(example.username, new PasswordToken(example.password));
    }

    // Instantiate an instance of Cosmos
    Cosmos cosmos = new CosmosImpl(zookeepers);

    // Create a definition for the data we want to load
    Store id = Store.create(connector, new Authorizations(), AscendingIndexIdentitySet.create());

    // Register the definition with Cosmos so it can track its progress.
    cosmos.register(id);

    // Load all of the data from our inputFile
    LoadBuildingPermits loader = new LoadBuildingPermits(cosmos, id, inputFile);
    loader.run();

    // Finalize the SortableResult which will prevent future writes to the data set
    cosmos.finalize(id);

    // Flush the ingest traces to the backend so we can see the results
    id.sendTraces();

    // Get back the Set of Columns that we've ingested.
    Set<Column> schema = Sets.newHashSet(cosmos.columns(id));

    log.debug("\nColumns: " + schema);

    Iterator<Column> iter = schema.iterator();
    while (iter.hasNext()) {
        Column c = iter.next();
        // Remove the internal ID field and columns that begin with CONTRACTOR_
        if (c.equals(LoadBuildingPermits.ID) || c.name().startsWith("CONTRACTOR_")) {
            iter.remove();
        }
    }

    Iterable<Index> indices = Iterables.transform(schema, new Function<Column, Index>() {
        @Override
        public Index apply(Column col) {
            return Index.define(col);
        }
    });

    // Ensure that we have locality groups set as we expect
    log.info("Ensure locality groups are set");
    id.optimizeIndices(indices);

    // Compact down the data for this SortableResult
    log.info("Issuing compaction for relevant data");
    id.consolidate();

    final int numTopValues = 10;

    // Walk through each column in the result set
    for (Column c : schema) {
        Stopwatch sw = new Stopwatch();
        sw.start();

        // Get the number of times we've seen each value in a given column
        CloseableIterable<Entry<RecordValue<?>, Long>> groupingsInColumn = cosmos.groupResults(id, c);

        log.info(c.name() + ":");

        // Iterate over the counts, collecting the top N values in each column
        TreeMap<Long, RecordValue<?>> topValues = Maps.newTreeMap();
        for (Entry<RecordValue<?>, Long> entry : groupingsInColumn) {
            if (topValues.size() == numTopValues) {
                Entry<Long, RecordValue<?>> least = topValues.pollFirstEntry();
                if (least.getKey() < entry.getValue()) {
                    topValues.put(entry.getValue(), entry.getKey());
                } else {
                    topValues.put(least.getKey(), least.getValue());
                }
            } else if (topValues.size() < numTopValues) {
                topValues.put(entry.getValue(), entry.getKey());
            }
        }

        for (Long key : topValues.descendingKeySet()) {
            log.info(topValues.get(key).value() + " occurred " + key + " times");
        }

        sw.stop();
        log.info("Took " + sw.toString() + " to run query.\n");
    }

    log.info("Deleting records");

    // Delete the records we've ingested
    if (!example.useMiniAccumuloCluster) {
        // Because I'm lazy and don't want to wait around to run the BatchDeleter when we're just going
        // to rm -rf the directory in a few secs.
        cosmos.delete(id);
    }

    // And shut down Cosmos
    cosmos.close();

    log.info("Cosmos stopped");

    // If we were using MAC, also stop that
    if (example.useMiniAccumuloCluster && null != mac) {
        mac.stop();
        if (null != macDir) {
            FileUtils.deleteDirectory(macDir);
        }
    }
}
From source file:io.druid.server.sql.SQLRunner.java
public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption("h", "help", false, "help");
    options.addOption("v", false, "verbose");
    options.addOption("e", "host", true, "endpoint [hostname:port]");

    CommandLine cmd = new GnuParser().parse(options, args);

    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("SQLRunner", options);
        System.exit(2);
    }

    String hostname = cmd.getOptionValue("e", "localhost:8080");
    String sql = cmd.getArgs().length > 0 ? cmd.getArgs()[0] : STATEMENT;

    ObjectMapper objectMapper = new DefaultObjectMapper();
    ObjectWriter jsonWriter = objectMapper.writerWithDefaultPrettyPrinter();

    CharStream stream = new ANTLRInputStream(sql);
    DruidSQLLexer lexer = new DruidSQLLexer(stream);
    TokenStream tokenStream = new CommonTokenStream(lexer);
    DruidSQLParser parser = new DruidSQLParser(tokenStream);
    lexer.removeErrorListeners();
    parser.removeErrorListeners();

    lexer.addErrorListener(ConsoleErrorListener.INSTANCE);
    parser.addErrorListener(ConsoleErrorListener.INSTANCE);

    try {
        DruidSQLParser.QueryContext queryContext = parser.query();
        if (parser.getNumberOfSyntaxErrors() > 0)
            throw new IllegalStateException();
        // parser.setBuildParseTree(true);
        // System.err.println(q.toStringTree(parser));
    } catch (Exception e) {
        String msg = e.getMessage();
        if (msg != null)
            System.err.println(e);
        System.exit(1);
    }

    final Query query;
    final TypeReference typeRef;
    boolean groupBy = false;

    if (parser.groupByDimensions.isEmpty()) {
        query = Druids.newTimeseriesQueryBuilder().dataSource(parser.getDataSource())
                .aggregators(new ArrayList<AggregatorFactory>(parser.aggregators.values()))
                .postAggregators(parser.postAggregators).intervals(parser.intervals)
                .granularity(parser.granularity).filters(parser.filter).build();

        typeRef = new TypeReference<List<Result<TimeseriesResultValue>>>() {
        };
    } else {
        query = GroupByQuery.builder().setDataSource(parser.getDataSource())
                .setAggregatorSpecs(new ArrayList<AggregatorFactory>(parser.aggregators.values()))
                .setPostAggregatorSpecs(parser.postAggregators).setInterval(parser.intervals)
                .setGranularity(parser.granularity).setDimFilter(parser.filter)
                .setDimensions(new ArrayList<DimensionSpec>(parser.groupByDimensions.values())).build();

        typeRef = new TypeReference<List<Row>>() {
        };
        groupBy = true;
    }

    String queryStr = jsonWriter.writeValueAsString(query);
    if (cmd.hasOption("v"))
        System.err.println(queryStr);

    URL url = new URL(String.format("http://%s/druid/v2/?pretty", hostname));
    final URLConnection urlConnection = url.openConnection();
    urlConnection.addRequestProperty("content-type", MediaType.APPLICATION_JSON);
    urlConnection.getOutputStream().write(StringUtils.toUtf8(queryStr));
    BufferedReader stdInput = new BufferedReader(
            new InputStreamReader(urlConnection.getInputStream(), Charsets.UTF_8));

    Object res = objectMapper.readValue(stdInput, typeRef);

    Joiner tabJoiner = Joiner.on("\t");

    if (groupBy) {
        List<Row> rows = (List<Row>) res;
        Iterable<String> dimensions = Iterables.transform(parser.groupByDimensions.values(),
                new Function<DimensionSpec, String>() {
                    @Override
                    public String apply(@Nullable DimensionSpec input) {
                        return input.getOutputName();
                    }
                });

        System.out.println(
                tabJoiner.join(Iterables.concat(Lists.newArrayList("timestamp"), dimensions, parser.fields)));
        for (final Row r : rows) {
            System.out.println(tabJoiner.join(Iterables.concat(
                    Lists.newArrayList(parser.granularity.toDateTime(r.getTimestampFromEpoch())),
                    Iterables.transform(parser.groupByDimensions.values(), new Function<DimensionSpec, String>() {
                        @Override
                        public String apply(@Nullable DimensionSpec input) {
                            return Joiner.on(",").join(r.getDimension(input.getOutputName()));
                        }
                    }),
                    Iterables.transform(parser.fields, new Function<String, Object>() {
                        @Override
                        public Object apply(@Nullable String input) {
                            return r.getFloatMetric(input);
                        }
                    }))));
        }
    } else {
        List<Result<TimeseriesResultValue>> rows = (List<Result<TimeseriesResultValue>>) res;
        System.out.println(tabJoiner.join(Iterables.concat(Lists.newArrayList("timestamp"), parser.fields)));
        for (final Result<TimeseriesResultValue> r : rows) {
            System.out.println(tabJoiner.join(Iterables.concat(Lists.newArrayList(r.getTimestamp()),
                    Lists.transform(parser.fields, new Function<String, Object>() {
                        @Override
                        public Object apply(@Nullable String input) {
                            return r.getValue().getMetric(input);
                        }
                    }))));
        }
    }

    CloseQuietly.close(stdInput);
}
From source file:Dictconvert.java
public static void main(String args[]) throws Exception {
    final ImmutableMap<String, String> nameMap = new ImmutableMap.Builder<String, String>()
            .put("ebucoreMainFramework", "ebucoreMainFramework")
            .put("ebucorePartFramework", "ebucorePartFramework")
            .put("ebucoreMetadataSchemeInformation", "ebucoreMetadataSchemeInformation")
            /* List mainly generated using AWK:
               awk '{t=$2; gsub(/ebucore/, "", t); print "\x27"$2"\x27:\x27" tolower(substr(t, 1, 1))substr(t, 2)"\x27," }' < tmp.txt > tmpout.txt */
            .put("ebucoreEntity", "entity").put("ebucoreContact", "contact")
            .put("ebucoreContactDetails", "details").put("ebucoreAddress", "address")
            .put("ebucoreRegion", "region").put("ebucoreCompoundName", "compoundName")
            .put("ebucoreRole", "role").put("ebucoreCountry", "country")
            .put("ebucoreTextualAnnotation", "textualAnnotation").put("ebucoreBasicLink", "basicLink")
            .put("ebucoreTypeGroup", "typeGroup").put("ebucoreOrganisation", "organisation")
            .put("ebucoreOrganisationDepartment", "organisationDepartment")
            .put("ebucoreCoreMetadata", "coreMetadata").put("ebucoreIdentifier", "identifier")
            .put("ebucoreTitle", "title").put("ebucoreAlternativeTitle", "alternativeTitle")
            .put("ebucoreFormatGroup", "formatGroup").put("ebucoreStatusGroup", "statusGroup")
            .put("ebucoreSubject", "subject").put("ebucoreDescription", "description")
            .put("ebucoreDate", "date").put("ebucoreDateType", "dateType").put("ebucoreType", "type")
            .put("ebucoreObjectType", "objectType").put("ebucoreGenre", "genre")
            .put("ebucoreTargetAudience", "targetAudience").put("ebucoreLanguage", "language")
            .put("ebucoreCoverage", "coverage").put("ebucoreSpatial", "spatial")
            .put("ebucoreLocation", "location").put("ebucoreCoordinates", "coordinates")
            .put("ebucoreTemporal", "temporal").put("ebucorePeriodOfTime", "periodOfTime")
            .put("ebucoreRights", "rights").put("ebucoreVersion", "version").put("ebucoreRating", "rating")
            .put("ebucorePublicationHistoryEvent", "publicationHistoryEvent")
            .put("ebucorePublicationHistory", "publicationHistory")
            .put("ebucorePublicationChannel", "publicationChannel")
            .put("ebucorePublicationMedium", "publicationMedium")
            .put("ebucorePublicationService", "publicationService")
            .put("ebucoreCustomRelation", "customRelation").put("ebucoreBasicRelation", "basicRelation")
            .put("ebucorePartMetadata", "partMetadata").put("ebucoreFormat", "format")
            .put("ebucoreVideoFormat", "videoFormat").put("ebucoreImageFormat", "imageFormat")
            .put("ebucoreAudioFormat", "audioFormat").put("ebucoreTrack", "track")
            .put("ebucoreDataFormat", "dataFormat").put("ebucoreCaptioning", "captioning")
            .put("ebucoreSubtitling", "subtitling").put("ebucoreAncillaryData", "ancillaryData")
            .put("ebucoreSigningFormat", "signingFormat")
            .put("ebucoreTechnicalAttributeString", "technicalAttributeString")
            .put("ebucoreTechnicalAttributeInt8", "technicalAttributeInt8")
            .put("ebucoreTechnicalAttributeInt16", "technicalAttributeInt16")
            .put("ebucoreTechnicalAttributeInt32", "technicalAttributeInt32")
            .put("ebucoreTechnicalAttributeInt64", "technicalAttributeInt64")
            .put("ebucoreTechnicalAttributeUInt8", "technicalAttributeUInt8")
            .put("ebucoreTechnicalAttributeUInt16", "technicalAttributeUInt16")
            .put("ebucoreTechnicalAttributeUInt32", "technicalAttributeUInt32")
            .put("ebucoreTechnicalAttributeUInt64", "technicalAttributeUInt64")
            .put("ebucoreTechnicalAttributeFloat", "technicalAttributeFloat")
            .put("ebucoreTechnicalAttributeRational", "technicalAttributeRational")
            .put("ebucoreTechnicalAttributeAnyURI", "technicalAttributeAnyURI")
            .put("ebucoreTechnicalAttributeBoolean", "technicalAttributeBoolean")
            .put("ebucoreDimension", "dimension").put("ebucoreWidth", "width").put("ebucoreHeight", "height")
            .put("ebucorePackageInfo", "packageInfo").put("ebucoreMedium", "medium")
            .put("ebucoreCodec", "codec").put("ebucoreRational", "rational")
            .put("ebucoreAspectRatio", "aspectRatio").build();

    String key_ns_ebucore_1 = "urn:ebu:metadata-schema:smpteclass13/groups/ebucore_2013";
    Map<String, String> namespaces = ImmutableMap.<String, String>of(key_ns_ebucore_1, "key_ns_ebucore_1");
    HashMap<String, String> keys = new HashMap<String, String>();

    DocumentBuilderFactory builderF = DocumentBuilderFactory.newInstance();
    builderF.setNamespaceAware(true);
    DocumentBuilder builder = builderF.newDocumentBuilder();
    Document xmlDictionary = builder.parse(args[0]);

    Map<String, String> localKeys = new HashMap<String, String>();

    for (Map.Entry<String, String> e : namespaces.entrySet()) {
        System.out.println("XMLCh " + e.getValue() + "[] = {"
                + Joiner.on(",").join(Iterables.transform(Lists.charactersOf(e.getKey()),
                        new Function<Character, String>() {
                            public String apply(Character o) {
                                return "\'" + o + "\'";
                            }
                        }))
                + ",\'\\0\'};");
    }

    for (Element e : Iterables.filter(getElements(xmlDictionary.getDocumentElement()), new Predicate<Element>() {
        public boolean apply(Element element) {
            return element.getAttribute("type").equals("localSet");
        }
    })) {
        String name = nameMap.get(e.getLocalName());
        String keyName = "key_" + Joiner.on("").join(e.getAttribute("key").split("\\s"));
        localKeys.put(keyName, keyName + "_name");

        System.out.println("const mxfKey " + keyName + " = {"
                + Joiner.on(",").join(Iterables.transform(Arrays.asList(e.getAttribute("key").split("\\s")),
                        new Function<String, String>() {
                            public String apply(String o) {
                                return "0x" + o;
                            }
                        }))
                + "};");
        System.out.println("const XMLCh " + keyName + "_name[] = {"
                + Joiner.on(",").join(Iterables.transform(Lists.charactersOf(name),
                        new Function<Character, String>() {
                            public String apply(Character o) {
                                return "\'" + o + "\'";
                            }
                        }))
                + ",\'\\0\'};");

        //System.out.println("st434dict.insert(std::pair<const mxfKey, st434info*>(");
        //System.out.println('\t' + keyName + ',');
        //System.out.println("\tnew st434info(/* " + e.getLocalName() + " */ " + keyName + "_name, /* " + key_ns_ebucore_1 + " */ " + namespaces.get(key_ns_ebucore_1) + ")");
        //System.out.println("));");
    }

    if (localKeys.size() > 0) {
        String arrayName = "arr_ebucore_elems";
        System.out.println("const void* " + arrayName + "[][2] = {");
        System.out.println(Joiner.on(", \n").join(Iterables.transform(localKeys.entrySet(),
                new Function<Map.Entry<String, String>, String>() {
                    @Override
                    public String apply(java.util.Map.Entry<String, String> e) {
                        return "{ &" + e.getKey() + ", " + e.getValue() + " }";
                    }
                })));
        System.out.println("};");
        System.out.println("for (int i=0; i<" + localKeys.size() + ";i++) {");
        System.out.println("\tst434dict.insert(std::pair<const mxfKey, st434info*>(");
        System.out.println("\t*(const mxfKey*)" + arrayName + "[i][0], ");
        System.out.println("\tnew st434info((const XMLCh*)" + arrayName + "[i][1], key_ns_ebucore_1)");
        System.out.println("));");
        System.out.println("}");
    }
}