List of usage examples for com.google.common.collect Multimap put
boolean put(@Nullable K key, @Nullable V value);
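Stores a key-value pair in the multimap. The boolean return value reports whether the multimap changed: list-backed implementations such as ArrayListMultimap always store the pair and return true, while set-backed implementations such as HashMultimap reject an already-present pair and return false. A minimal sketch of this contract before the real-world examples below (the class name and sample data are illustrative):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

public class MultimapPutDemo {
    public static void main(String[] args) {
        // List-backed: duplicate key-value pairs are kept, so put always returns true
        Multimap<String, String> listBacked = ArrayListMultimap.create();
        System.out.println(listBacked.put("fruit", "apple")); // true
        System.out.println(listBacked.put("fruit", "apple")); // true; "fruit" -> [apple, apple]

        // Set-backed: a duplicate pair leaves the multimap unchanged and returns false
        Multimap<String, String> setBacked = HashMultimap.create();
        System.out.println(setBacked.put("fruit", "apple")); // true
        System.out.println(setBacked.put("fruit", "apple")); // false; still a single entry

        // Values for one key are read back with get(key)
        System.out.println(listBacked.get("fruit")); // [apple, apple]
    }
}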
From source file:com.cloudbees.api.Main.java
public static void main(String[] args) throws Exception {
    File beesCredentialsFile = new File(System.getProperty("user.home"), ".bees/bees.config");
    Preconditions.checkArgument(beesCredentialsFile.exists(), "File %s not found", beesCredentialsFile);
    Properties beesCredentials = new Properties();
    beesCredentials.load(new FileInputStream(beesCredentialsFile));
    String apiUrl = "https://api.cloudbees.com/api";
    String apiKey = beesCredentials.getProperty("bees.api.key");
    String secret = beesCredentials.getProperty("bees.api.secret");
    BeesClient client = new BeesClient(apiUrl, apiKey, secret, "xml", "1.0");
    client.setVerbose(false);

    URL databasesUrl = Thread.currentThread().getContextClassLoader().getResource("databases.txt");
    Preconditions.checkNotNull(databasesUrl, "File 'databases.txt' NOT found in the classpath");

    Collection<String> databaseNames;
    try {
        databaseNames = Sets.newTreeSet(Resources.readLines(databasesUrl, Charsets.ISO_8859_1));
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }
    databaseNames = Collections2.transform(databaseNames, new Function<String, String>() {
        @Nullable
        @Override
        public String apply(@Nullable String input) {
            // {host_db_create,<<"tco_q5rm">>,<<"TCO_q5rm">>,
            if (input == null)
                return null;
            if (input.startsWith("#"))
                return null;
            if (input.indexOf('"') == -1) {
                logger.warn("Skip invalid line {}", input);
                return null;
            }
            input = input.substring(input.indexOf('"') + 1);
            if (input.indexOf('"') == -1) {
                logger.warn("Skip invalid line {}", input);
                return null;
            }
            return input.substring(0, input.indexOf('"'));
        }
    });
    databaseNames = Collections2.filter(databaseNames, new Predicate<String>() {
        @Override
        public boolean apply(@Nullable String s) {
            return !Strings.isNullOrEmpty(s);
        }
    });

    // Group database names by owner account
    Multimap<String, String> databasesByAccount = ArrayListMultimap.create();

    Class.forName("com.mysql.jdbc.Driver");
    for (String databaseName : databaseNames) {
        try {
            DatabaseInfo databaseInfo = client.databaseInfo(databaseName, true);
            databasesByAccount.put(databaseInfo.getOwner(), databaseInfo.getName());
            logger.debug("Evaluate " + databaseInfo.getName());
            if (true == false) { // Hibernate (disabled)
                logger.info("Hibernate {}", databaseName);
                Map<String, String> params = new HashMap<String, String>();
                params.put("database_id", databaseName);
                String url = client.getRequestURL("database.hibernate", params);
                String response = client.executeRequest(url);
                DatabaseInfoResponse apiResponse = (DatabaseInfoResponse) client.readResponse(response);
                logger.info("DB {} status: {}", apiResponse.getDatabaseInfo().getName(),
                        apiResponse.getDatabaseInfo().getStatus());
            }
            if (true == false) { // Activate (disabled)
                logger.info("Activate {}", databaseName);
                Map<String, String> params = new HashMap<String, String>();
                params.put("database_id", databaseName);
                String url = client.getRequestURL("database.activate", params);
                String response = client.executeRequest(url);
                DatabaseInfoResponse apiResponse = (DatabaseInfoResponse) client.readResponse(response);
                logger.info("DB {} status: {}", apiResponse.getDatabaseInfo().getName(),
                        apiResponse.getDatabaseInfo().getStatus());
            }
            String dbUrl = "jdbc:mysql://" + databaseInfo.getMaster() + "/" + databaseInfo.getName();
            logger.info("Connect to {} user={}", dbUrl, databaseInfo.getUsername());
            Connection cnn = DriverManager.getConnection(dbUrl, databaseInfo.getUsername(),
                    databaseInfo.getPassword());
            cnn.setAutoCommit(false);
            cnn.close();
        } catch (Exception e) {
            logger.warn("Exception for {}", databaseName, e);
        }
    }

    System.out.println("OWNERS");
    for (String account : databasesByAccount.keySet()) {
        System.out.println(account + ": " + Joiner.on(", ").join(databasesByAccount.get(account)));
    }
}
From source file:com.netflix.genie.client.sample.ClusterServiceSampleClient.java
/**
 * Main for running client code.
 *
 * @param args program arguments
 * @throws Exception On issue.
 */
public static void main(final String[] args) throws Exception {
    // Initialize Eureka, if it is being used
    // LOG.info("Initializing Eureka");
    // ClusterServiceClient.initEureka("test");
    LOG.info("Initializing list of Genie servers");
    ConfigurationManager.getConfigInstance().setProperty("genie2Client.ribbon.listOfServers",
            "localhost:7001");

    LOG.info("Initializing ApplicationServiceClient");
    final ApplicationServiceClient appClient = ApplicationServiceClient.getInstance();

    final Application app1 = appClient.createApplication(
            ApplicationServiceSampleClient.getSampleApplication(ApplicationServiceSampleClient.ID));
    LOG.info("Created application:");
    LOG.info(app1.toString());

    final Application app2 = appClient.createApplication(
            ApplicationServiceSampleClient.getSampleApplication(ApplicationServiceSampleClient.ID + "2"));
    LOG.info("Created application:");
    LOG.info(app2.toString());

    LOG.info("Initializing CommandServiceClient");
    final CommandServiceClient commandClient = CommandServiceClient.getInstance();

    LOG.info("Creating command pig13_mr2");
    final Command command1 = commandClient
            .createCommand(CommandServiceSampleClient.createSampleCommand(CommandServiceSampleClient.ID));
    commandClient.setApplicationForCommand(command1.getId(), app1);
    LOG.info("Created command:");
    LOG.info(command1.toString());

    final List<Command> commands = new ArrayList<>();
    commands.add(command1);

    LOG.info("Initializing ClusterConfigServiceClient");
    final ClusterServiceClient clusterClient = ClusterServiceClient.getInstance();

    LOG.info("Creating new cluster configuration");
    final Cluster cluster1 = clusterClient.createCluster(createSampleCluster(ID));
    clusterClient.addCommandsToCluster(cluster1.getId(), commands);
    LOG.info("Cluster config created with id: " + cluster1.getId());
    LOG.info(cluster1.toString());

    LOG.info("Getting cluster config by id");
    final Cluster cluster2 = clusterClient.getCluster(cluster1.getId());
    LOG.info(cluster2.toString());

    LOG.info("Getting clusterConfigs using specified filter criteria");
    final Multimap<String, String> params = ArrayListMultimap.create();
    params.put("name", NAME);
    params.put("adHoc", "false");
    params.put("test", "true");
    params.put("limit", "3");
    final List<Cluster> clusters = clusterClient.getClusters(params);
    if (clusters != null && !clusters.isEmpty()) {
        for (final Cluster cluster : clusters) {
            LOG.info(cluster.toString());
        }
    } else {
        LOG.info("No clusters found for parameters");
    }

    LOG.info("Configurations for cluster with id " + cluster1.getId());
    final Set<String> configs = clusterClient.getConfigsForCluster(cluster1.getId());
    for (final String config : configs) {
        LOG.info("Config = " + config);
    }

    LOG.info("Adding configurations to cluster with id " + cluster1.getId());
    final Set<String> newConfigs = new HashSet<>();
    newConfigs.add("someNewConfigFile");
    newConfigs.add("someOtherNewConfigFile");
    final Set<String> configs2 = clusterClient.addConfigsToCluster(cluster1.getId(), newConfigs);
    for (final String config : configs2) {
        LOG.info("Config = " + config);
    }

    LOG.info("Updating set of configuration files associated with id " + cluster1.getId());
    // This should remove the original config leaving only the two in this set
    final Set<String> configs3 = clusterClient.updateConfigsForCluster(cluster1.getId(), newConfigs);
    for (final String config : configs3) {
        LOG.info("Config = " + config);
    }

    /********************** Begin tests for tag Api's **********************/
    LOG.info("Get tags for cluster with id " + cluster1.getId());
    final Set<String> tags = cluster1.getTags();
    for (final String tag : tags) {
        LOG.info("Tag = " + tag);
    }

    LOG.info("Adding tags to cluster with id " + cluster1.getId());
    final Set<String> newTags = new HashSet<>();
    newTags.add("tag1");
    newTags.add("tag2");
    final Set<String> tags2 = clusterClient.addTagsToCluster(cluster1.getId(), newTags);
    for (final String tag : tags2) {
        LOG.info("Tag = " + tag);
    }

    LOG.info("Updating set of tags associated with id " + cluster1.getId());
    // This should remove the original tags leaving only the two in this set
    final Set<String> tags3 = clusterClient.updateTagsForCluster(cluster1.getId(), newTags);
    for (final String tag : tags3) {
        LOG.info("Tag = " + tag);
    }

    LOG.info("Deleting one tag from the cluster with id " + cluster1.getId());
    // This should remove "tag1" from the tags
    final Set<String> tags5 = clusterClient.removeTagForCluster(cluster1.getId(), "tag1");
    for (final String tag : tags5) {
        LOG.info("Tag = " + tag);
    }

    LOG.info("Deleting all the tags from the cluster with id " + cluster1.getId());
    final Set<String> tags4 = clusterClient.removeAllTagsForCluster(cluster1.getId());
    for (final String tag : tags4) { // Shouldn't print anything
        LOG.info("Tag = " + tag);
    }
    /********************** End tests for tag Api's **********************/

    LOG.info("Commands for cluster with id " + cluster1.getId());
    final List<Command> commands1 = clusterClient.getCommandsForCluster(cluster1.getId());
    for (final Command command : commands1) {
        LOG.info("Command = " + command);
    }

    LOG.info("Adding commands to cluster with id " + cluster1.getId());
    final List<Command> newCmds = new ArrayList<>();
    newCmds.add(commandClient.createCommand(CommandServiceSampleClient.createSampleCommand(ID + "something")));
    newCmds.add(commandClient.createCommand(CommandServiceSampleClient.createSampleCommand(null)));
    final List<Command> commands2 = clusterClient.addCommandsToCluster(cluster1.getId(), newCmds);
    for (final Command command : commands2) {
        LOG.info("Command = " + command);
    }

    LOG.info("Updating set of commands associated with id " + cluster1.getId());
    // This should remove the original commands leaving only the two in this set
    final List<Command> commands3 = clusterClient.updateCommandsForCluster(cluster1.getId(), newCmds);
    for (final Command command : commands3) {
        LOG.info("Command = " + command);
    }

    LOG.info("Deleting the command from the cluster with id " + ID + "something");
    final Set<Command> commands4 = clusterClient.removeCommandForCluster(cluster1.getId(), ID + "something");
    for (final Command command : commands4) {
        LOG.info("Command = " + command);
    }

    LOG.info("Deleting all the commands from the cluster with id " + cluster1.getId());
    final List<Command> commands5 = clusterClient.removeAllCommandsForCluster(cluster1.getId());
    for (final Command command : commands5) { // Shouldn't print anything
        LOG.info("Command = " + command);
    }

    LOG.info("Updating existing cluster config");
    cluster2.setStatus(ClusterStatus.TERMINATED);
    final Cluster cluster3 = clusterClient.updateCluster(cluster2.getId(), cluster2);
    LOG.info("Cluster updated:");
    LOG.info(cluster3.toString());

    LOG.info("Deleting cluster config using id");
    final Cluster cluster4 = clusterClient.deleteCluster(cluster1.getId());
    LOG.info("Deleted cluster config with id: " + cluster1.getId());
    LOG.info(cluster4.toString());

    LOG.info("Deleting command config using id");
    final Command command5 = commandClient.deleteCommand(command1.getId());
    LOG.info("Deleted command config with id: " + command1.getId());
    LOG.info(command5.toString());

    LOG.info("Deleting commands in newCmds");
    for (final Command cmd : newCmds) {
        commandClient.deleteCommand(cmd.getId());
    }

    LOG.info("Deleting application config using id");
    final Application app3 = appClient.deleteApplication(app1.getId());
    LOG.info("Deleted application config with id: " + app1.getId());
    LOG.info(app3.toString());

    LOG.info("Deleting application config using id");
    final Application app4 = appClient.deleteApplication(app2.getId());
    LOG.info("Deleted application config with id: " + app2.getId());
    LOG.info(app4.toString());

    LOG.info("Done");
}
From source file:org.commoncrawl.service.parser.server.ParserSlaveServer.java
public static void main(String[] args) {
    Multimap<String, String> options = TreeMultimap.create();
    for (int i = 0; i < args.length; ++i) {
        String optionName = args[i];
        if (++i != args.length) {
            String optionValue = args[i];
            options.put(optionName, optionValue);
        }
    }
    options.removeAll("--server");
    options.put("--server", ParserSlaveServer.class.getName());

    Collection<Entry<String, String>> entrySet = options.entries();
    String[] finalArgs = new String[entrySet.size() * 2];
    int index = 0;
    for (Entry<String, String> entry : entrySet) {
        finalArgs[index++] = entry.getKey();
        finalArgs[index++] = entry.getValue();
    }
    try {
        CommonCrawlServer.main(finalArgs);
    } catch (Exception e) {
        LOG.error(CCStringUtils.stringifyException(e));
    }
}
From source file:org.hupo.psi.mi.example.xml.CreateEntryPerPublication.java
public static void main(String[] args) throws Exception {
    // will read this inputFile
    final PsimiXmlVersion xmlVersion = PsimiXmlVersion.VERSION_254;
    final File inputFile = new File("d:/Downloads/imex-mpidb.xml");
    final File outputFile = new File("d:/Downloads/lala.xml");

    // action!
    // We will use a multimap (from the google collections library) to store
    // the interactions grouped by publication id
    Multimap<String, Interaction> publicationMap = HashMultimap.create();

    // Read the file
    PsimiXmlReader reader = new PsimiXmlReader(xmlVersion);
    EntrySet entrySet = reader.read(inputFile);

    // Iterate through the entries
    for (Entry entry : entrySet.getEntries()) {
        for (Interaction interaction : entry.getInteractions()) {
            String publicationId = findPublicationId(interaction);
            publicationMap.put(publicationId, interaction);
        }
    }

    // now create an Entry per publication
    EntrySet newEntrySet = new EntrySet(xmlVersion);

    // get first source from the original inputFile
    Source source = entrySet.getEntries().iterator().next().getSource();

    // iterating through the multimap, we get the grouped interactions
    for (Map.Entry<String, Collection<Interaction>> pubInteractions : publicationMap.asMap().entrySet()) {
        Entry entry = new Entry(pubInteractions.getValue());
        entry.setSource(source);
        newEntrySet.getEntries().add(entry);
    }

    // write the output file
    PsimiXmlWriter psimiXmlWriter = new PsimiXmlWriter(xmlVersion);
    psimiXmlWriter.write(newEntrySet, outputFile);
}
From source file:org.terasology.documentation.BindingScraper.java
/**
 * @param args (ignored)
 * @throws Exception if the module environment cannot be loaded
 */
public static void main(String[] args) throws Exception {
    ModuleManager moduleManager = ModuleManagerFactory.create();

    // Holds normal input mappings where there is only one key
    Multimap<InputCategory, String> categories = ArrayListMultimap.create();
    Multimap<String, Input> keys = ArrayListMultimap.create();
    Map<String, String> desc = new HashMap<>();

    for (Class<?> holdingType : moduleManager.getEnvironment().getTypesAnnotatedWith(InputCategory.class)) {
        InputCategory inputCategory = holdingType.getAnnotation(InputCategory.class);
        categories.put(inputCategory, null);
        for (String button : inputCategory.ordering()) {
            categories.put(inputCategory, button);
        }
    }

    for (Class<?> buttonEvent : moduleManager.getEnvironment()
            .getTypesAnnotatedWith(RegisterBindButton.class)) {
        DefaultBinding defBinding = buttonEvent.getAnnotation(DefaultBinding.class);
        RegisterBindButton info = buttonEvent.getAnnotation(RegisterBindButton.class);
        String cat = info.category();
        String id = "engine:" + info.id();
        desc.put(id, info.description());
        if (cat.isEmpty()) {
            InputCategory inputCategory = findEntry(categories, id);
            if (inputCategory == null) {
                System.out.println("Invalid category for: " + info.id());
            }
        } else {
            InputCategory inputCategory = findCategory(categories, cat);
            if (inputCategory != null) {
                categories.put(inputCategory, id);
            } else {
                System.out.println("Invalid category for: " + info.id());
            }
        }
        if (defBinding != null) {
            // This handles bindings with just one key
            Input input = defBinding.type().getInput(defBinding.id());
            keys.put(id, input);
        } else {
            // See if there is a multi-mapping for this button
            DefaultBindings multiBinding = buttonEvent.getAnnotation(DefaultBindings.class);
            // Annotation math magic. We're expecting a DefaultBindings containing one DefaultBinding pair
            if (multiBinding != null && multiBinding.value().length == 2) {
                DefaultBinding[] bindings = multiBinding.value();
                Input primary = bindings[0].type().getInput(bindings[0].id());
                Input secondary = bindings[1].type().getInput(bindings[1].id());
                keys.put(id, primary);
                keys.put(id, secondary);
            }
        }
    }

    for (InputCategory row : categories.keySet()) {
        System.out.println("# " + row.displayName());
        categories.get(row).stream().filter(entry -> entry != null)
                .forEach(entry -> System.out.println(desc.get(entry) + ": " + keys.get(entry)));
    }
}
From source file:org.apache.ctakes.temporal.data.analysis.PrintInconsistentAnnotations.java
public static void main(String[] args) throws Exception {
    Options options = CliFactory.parseArguments(Options.class, args);
    int windowSize = 50;

    List<Integer> patientSets = options.getPatients().getList();
    List<Integer> trainItems = THYMEData.getPatientSets(patientSets, THYMEData.TRAIN_REMAINDERS);
    List<File> files = THYMEData.getFilesFor(trainItems, options.getRawTextDirectory());

    CollectionReader reader = UriCollectionReader.getCollectionReaderFromFiles(files);
    AggregateBuilder aggregateBuilder = new AggregateBuilder();
    aggregateBuilder.add(UriToDocumentTextAnnotator.getDescription());
    aggregateBuilder.add(AnalysisEngineFactory.createEngineDescription(XMIReader.class,
            XMIReader.PARAM_XMI_DIRECTORY, options.getXMIDirectory()));

    int totalDocTimeRels = 0;
    int totalInconsistentDocTimeRels = 0;
    for (Iterator<JCas> casIter = new JCasIterator(reader, aggregateBuilder.createAggregate()); casIter
            .hasNext();) {
        JCas jCas = casIter.next();
        String text = jCas.getDocumentText();
        JCas goldView = jCas.getView("GoldView");

        // group events by their narrative container
        Multimap<Annotation, EventMention> containers = HashMultimap.create();
        for (TemporalTextRelation relation : JCasUtil.select(goldView, TemporalTextRelation.class)) {
            if (relation.getCategory().equals("CONTAINS")) {
                Annotation arg1 = relation.getArg1().getArgument();
                Annotation arg2 = relation.getArg2().getArgument();
                if (arg2 instanceof EventMention) {
                    EventMention event = (EventMention) arg2;
                    containers.put(arg1, event);
                }
            }
        }

        // check each container for inconsistent DocTimeRels
        for (Annotation container : containers.keySet()) {
            Set<String> docTimeRels = Sets.newHashSet();
            for (EventMention event : containers.get(container)) {
                docTimeRels.add(event.getEvent().getProperties().getDocTimeRel());
            }
            totalDocTimeRels += docTimeRels.size();

            boolean inconsistentDocTimeRels;
            if (container instanceof EventMention) {
                EventMention mention = ((EventMention) container);
                String containerDocTimeRel = mention.getEvent().getProperties().getDocTimeRel();
                inconsistentDocTimeRels = false;
                for (String docTimeRel : docTimeRels) {
                    if (docTimeRel.equals(containerDocTimeRel)) {
                        continue;
                    }
                    if (containerDocTimeRel.equals("BEFORE/OVERLAP")
                            && (docTimeRel.equals("BEFORE") || docTimeRel.equals("OVERLAP"))) {
                        continue;
                    }
                    inconsistentDocTimeRels = true;
                    break;
                }
            } else {
                if (docTimeRels.size() == 1) {
                    inconsistentDocTimeRels = false;
                } else if (docTimeRels.contains("BEFORE/OVERLAP")) {
                    inconsistentDocTimeRels = docTimeRels.size() == 1
                            && (docTimeRels.contains("BEFORE") || docTimeRels.contains("OVERLAP"));
                } else {
                    inconsistentDocTimeRels = true;
                }
            }

            // if inconsistent: print events, DocTimeRels and surrounding context
            if (inconsistentDocTimeRels) {
                totalInconsistentDocTimeRels += docTimeRels.size();

                List<Integer> offsets = Lists.newArrayList();
                offsets.add(container.getBegin());
                offsets.add(container.getEnd());
                for (EventMention event : containers.get(container)) {
                    offsets.add(event.getBegin());
                    offsets.add(event.getEnd());
                }
                Collections.sort(offsets);
                int begin = Math.max(offsets.get(0) - windowSize, 0);
                int end = Math.min(offsets.get(offsets.size() - 1) + windowSize, text.length());
                System.err.printf("Inconsistent DocTimeRels in %s, ...%s...\n",
                        new File(ViewUriUtil.getURI(jCas)).getName(),
                        text.substring(begin, end).replaceAll("([\r\n])[\r\n]+", "$1"));
                if (container instanceof EventMention) {
                    System.err.printf("Container: \"%s\" (docTimeRel=%s)\n", container.getCoveredText(),
                            ((EventMention) container).getEvent().getProperties().getDocTimeRel());
                } else {
                    System.err.printf("Container: \"%s\"\n", container.getCoveredText());
                }
                Ordering<EventMention> byBegin = Ordering.natural()
                        .onResultOf(new Function<EventMention, Integer>() {
                            @Override
                            public Integer apply(@Nullable EventMention event) {
                                return event.getBegin();
                            }
                        });
                for (EventMention event : byBegin.sortedCopy(containers.get(container))) {
                    System.err.printf("* \"%s\" (docTimeRel=%s)\n", event.getCoveredText(),
                            event.getEvent().getProperties().getDocTimeRel());
                }
                System.err.println();
            }
        }
    }
    System.err.printf("Inconsistent DocTimeRels: %.1f%% (%d/%d)\n",
            100.0 * totalInconsistentDocTimeRels / totalDocTimeRels, totalInconsistentDocTimeRels,
            totalDocTimeRels);
}
From source file:org.commoncrawl.service.parser.ec2.EC2ParserMaster.java
public static void main(String[] args) throws IOException {
    Multimap<String, String> options = TreeMultimap.create();
    for (int i = 0; i < args.length; ++i) {
        String optionName = args[i];
        if (++i != args.length) {
            String optionValue = args[i];
            options.put(optionName, optionValue);
        }
    }
    options.removeAll("--server");
    options.put("--server", EC2ParserMaster.class.getName());

    Collection<Entry<String, String>> entrySet = options.entries();
    String[] finalArgs = new String[entrySet.size() * 2];
    int index = 0;
    for (Entry<String, String> entry : entrySet) {
        finalArgs[index++] = entry.getKey();
        finalArgs[index++] = entry.getValue();
    }
    try {
        CommonCrawlServer.main(finalArgs);
    } catch (Exception e) {
        LOG.error(CCStringUtils.stringifyException(e));
    }
}
From source file:org.crypto.sse.IEX2LevAMAZON.java
/**
 * @param args
 * @throws Exception
 */
@SuppressWarnings("null")
public static void main(String[] args) throws Exception {
    Printer.addPrinter(new Printer(Printer.LEVEL.EXTRA));

    // First Job
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "IEX-2Lev");
    job.setJarByClass(IEX2LevAMAZON.class);
    job.setMapperClass(MLK1.class);
    job.setReducerClass(RLK1.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);
    job.setOutputKeyClass(Text.class);
    job.setNumReduceTasks(1);
    job.setOutputValueClass(ArrayListWritable.class);
    job.setInputFormatClass(FileNameKeyInputFormat.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    // Second Job
    Configuration conf2 = new Configuration();
    Job job2 = Job.getInstance(conf2, "IEX-2Lev");
    job2.setJarByClass(IEX2LevAMAZON.class);
    job2.setMapperClass(MLK2.class);
    job2.setReducerClass(RLK2.class);
    job2.setNumReduceTasks(1);
    job2.setMapOutputKeyClass(Text.class);
    job2.setMapOutputValueClass(Text.class);
    job2.setOutputKeyClass(Text.class);
    job2.setOutputValueClass(ArrayListWritable.class);
    job2.setInputFormatClass(FileNameKeyInputFormat.class);
    FileInputFormat.addInputPath(job2, new Path(args[0]));
    FileOutputFormat.setOutputPath(job2, new Path(args[2]));

    job.waitForCompletion(true);
    job2.waitForCompletion(true);

    // Here add your Amazon Credentials
    AWSCredentials credentials = new BasicAWSCredentials("XXXXXXXXXXXXXXXX", "XXXXXXXXXXXXXXXX");
    // create a client connection based on credentials
    AmazonS3 s3client = new AmazonS3Client(credentials);

    // create bucket - name must be unique for all S3 users
    String bucketName = "iexmaptest";

    S3Object s3object = s3client.getObject(new GetObjectRequest(bucketName, args[4]));
    Printer.debugln(s3object.getObjectMetadata().getContentType());
    Printer.debugln("" + s3object.getObjectMetadata().getContentLength());

    List<String> lines = new ArrayList<String>();
    String folderName = "2";

    BufferedReader reader = new BufferedReader(new InputStreamReader(s3object.getObjectContent()));
    String line;
    int counter = 0;
    while ((line = reader.readLine()) != null) {
        // can copy the content locally as well using a buffered writer
        lines.add(line);
        Printer.debugln(line);
        // upload file to folder
        String fileName = folderName + "/" + Integer.toString(counter);
        ByteArrayInputStream input = new ByteArrayInputStream(line.getBytes());
        s3client.putObject(bucketName, fileName, input, new ObjectMetadata());
        counter++;
    }

    Multimap<String, String> lookup = ArrayListMultimap.create();
    for (int i = 0; i < lines.size(); i++) {
        String[] tokens = lines.get(i).split("\\s+");
        for (int j = 1; j < tokens.length; j++) {
            lookup.put(tokens[0], tokens[j]);
        }
    }

    // Loading inverted index that associates file identifiers to keywords
    lines = new ArrayList<String>();
    s3object = s3client.getObject(new GetObjectRequest(bucketName, args[5]));
    Printer.debugln(s3object.getObjectMetadata().getContentType());
    Printer.debugln("" + s3object.getObjectMetadata().getContentLength());

    // Loading inverted index that associates keywords to identifiers
    reader = new BufferedReader(new InputStreamReader(s3object.getObjectContent()));
    while ((line = reader.readLine()) != null) {
        lines.add(line);
    }
    Multimap<String, String> lookup2 = ArrayListMultimap.create();
    for (int i = 0; i < lines.size(); i++) {
        String[] tokens = lines.get(i).split("\\s+");
        for (int j = 1; j < tokens.length; j++) {
            lookup2.put(tokens[0], tokens[j]);
        }
    }

    // Delete File
    try {
        s3client.deleteObject(new DeleteObjectRequest(bucketName, args[4]));
    } catch (AmazonServiceException ase) {
        Printer.debugln("Caught an AmazonServiceException.");
        Printer.debugln("Error Message: " + ase.getMessage());
        Printer.debugln("HTTP Status Code: " + ase.getStatusCode());
        Printer.debugln("AWS Error Code: " + ase.getErrorCode());
        Printer.debugln("Error Type: " + ase.getErrorType());
        Printer.debugln("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        Printer.debugln("Caught an AmazonClientException.");
        Printer.debugln("Error Message: " + ace.getMessage());
    }

    /*
     * Start of IEX-2Lev construction
     */

    // Generation of keys for IEX-2Lev
    BufferedReader keyRead = new BufferedReader(new InputStreamReader(System.in));
    System.out.println("Enter your password :");
    String pass = keyRead.readLine();

    // You can change the size of the key; Here we set it to 128
    List<byte[]> listSK = IEX2Lev.keyGen(128, pass, "salt/salt", 100000);

    // Generation of Local Multi-maps with Mapper job only without reducer
    Configuration conf3 = new Configuration();
    String testSerialization1 = new String(Base64.encodeBase64(Serializer.serialize(lookup)));
    String testSerialization2 = new String(Base64.encodeBase64(Serializer.serialize(lookup2)));
    String testSerialization3 = new String(Base64.encodeBase64(Serializer.serialize(listSK)));
    // String testSerialization2 = gson.toJson(lookup2);
    conf3.set("lookup", testSerialization1);
    conf3.set("lookup2", testSerialization2);
    conf3.set("setKeys", testSerialization3);
    Job job3 = Job.getInstance(conf3, "Local MM");
    job3.setJarByClass(IEX2LevAMAZON.class);
    job3.setMapperClass(LocalMM.class);
    job3.setNumReduceTasks(0);
    FileInputFormat.addInputPath(job3, new Path(args[2]));
    FileOutputFormat.setOutputPath(job3, new Path(args[3]));
    job3.waitForCompletion(true);
}
From source file:it.units.malelab.ege.MappingPropertiesExperimenter.java
public static void main(String[] args) throws IOException, InterruptedException, ExecutionException {
    final int n = 10000;
    final int nDist = 10000;
    // prepare problems and methods
    List<String> problems = Lists.newArrayList("bool-parity5", "bool-mopm3", "sr-keijzer6", "sr-nguyen7",
            "sr-pagie1", "sr-vladislavleva4", "other-klandscapes3", "other-klandscapes7", "other-text");
    List<String> mappers = new ArrayList<>();
    for (int gs : new int[] { 64, 128, 256, 512, 1024 }) {
        mappers.add("ge-" + gs + "-2");
        mappers.add("ge-" + gs + "-4");
        mappers.add("ge-" + gs + "-8");
        mappers.add("ge-" + gs + "-12");
        mappers.add("pige-" + gs + "-4");
        mappers.add("pige-" + gs + "-8");
        mappers.add("pige-" + gs + "-16");
        mappers.add("pige-" + gs + "-24");
        mappers.add("hge-" + gs + "-0");
        mappers.add("whge-" + gs + "-2");
        mappers.add("whge-" + gs + "-3");
        mappers.add("whge-" + gs + "-5");
    }
    mappers.add("sge-0-5");
    mappers.add("sge-0-6");
    mappers.add("sge-0-7");
    mappers.add("sge-0-8");
    // note: the full list built above is discarded; only these five mappers are evaluated
    mappers.clear();
    mappers.addAll(Lists.newArrayList("ge-1024-8", "pige-1024-16", "hge-1024-0", "whge-1024-3", "sge-0-6"));
    PrintStream filePrintStream = null;
    if (args.length > 0) {
        filePrintStream = new PrintStream(args[0]);
    } else {
        filePrintStream = System.out;
    }
    filePrintStream.printf("problem;mapper;genotypeSize;param;property;value%n");
    // prepare distances
    Distance<Node<String>> phenotypeDistance = new CachedDistance<>(new LeavesEdit<String>());
    Distance<Sequence> genotypeDistance = new CachedDistance<>(new Hamming());
    // iterate
    for (String problemName : problems) {
        for (String mapperName : mappers) {
            System.out.printf("%20.20s, %20.20s", problemName, mapperName);
            // build problem
            Problem<String, NumericFitness> problem = null;
            if (problemName.equals("bool-parity5")) {
                problem = new Parity(5);
            } else if (problemName.equals("bool-mopm3")) {
                problem = new MultipleOutputParallelMultiplier(3);
            } else if (problemName.equals("sr-keijzer6")) {
                problem = new HarmonicCurve();
            } else if (problemName.equals("sr-nguyen7")) {
                problem = new Nguyen7(1);
            } else if (problemName.equals("sr-pagie1")) {
                problem = new Pagie1();
            } else if (problemName.equals("sr-vladislavleva4")) {
                problem = new Vladislavleva4(1);
            } else if (problemName.equals("other-klandscapes3")) {
                problem = new KLandscapes(3);
            } else if (problemName.equals("other-klandscapes7")) {
                problem = new KLandscapes(7);
            } else if (problemName.equals("other-text")) {
                problem = new Text();
            }
            // build configuration and evolver
            Mapper mapper = null;
            int genotypeSize = Integer.parseInt(mapperName.split("-")[1]);
            int mapperMainParam = Integer.parseInt(mapperName.split("-")[2]);
            if (mapperName.split("-")[0].equals("ge")) {
                mapper = new StandardGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("pige")) {
                mapper = new PiGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("sge")) {
                mapper = new SGEMapper<>(mapperMainParam, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("hge")) {
                mapper = new HierarchicalMapper<>(problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("whge")) {
                mapper = new WeightedHierarchicalMapper<>(mapperMainParam, false, true, problem.getGrammar());
            }
            // prepare things
            Random random = new Random(1);
            Set<Sequence> genotypes = new LinkedHashSet<>(n);
            // build genotypes
            if (mapperName.split("-")[0].equals("sge")) {
                SGEGenotypeFactory<String> factory = new SGEGenotypeFactory<>((SGEMapper) mapper);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
                genotypeSize = factory.getBitSize();
            } else {
                BitsGenotypeFactory factory = new BitsGenotypeFactory(genotypeSize);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
            }
            // build and fill map
            Multimap<Node<String>, Sequence> multimap = HashMultimap.create();
            int progress = 0;
            for (Sequence genotype : genotypes) {
                Node<String> phenotype;
                try {
                    if (mapperName.split("-")[0].equals("sge")) {
                        phenotype = mapper.map((SGEGenotype<String>) genotype, new HashMap<>());
                    } else {
                        phenotype = mapper.map((BitsGenotype) genotype, new HashMap<>());
                    }
                } catch (MappingException e) {
                    phenotype = Node.EMPTY_TREE;
                }
                multimap.put(phenotype, genotype);
                progress = progress + 1;
                if (progress % Math.round(n / 10) == 0) {
                    System.out.print(".");
                }
            }
            System.out.println();
            // compute distances
            List<Pair<Double, Double>> allDistances = new ArrayList<>();
            List<Pair<Double, Double>> allValidDistances = new ArrayList<>();
            Multimap<Node<String>, Double> genotypeDistances = ArrayListMultimap.create();
            for (Node<String> phenotype : multimap.keySet()) {
                for (Sequence genotype1 : multimap.get(phenotype)) {
                    for (Sequence genotype2 : multimap.get(phenotype)) {
                        double gDistance = genotypeDistance.d(genotype1, genotype2);
                        genotypeDistances.put(phenotype, gDistance);
                        if (genotypeDistances.get(phenotype).size() > nDist) {
                            break;
                        }
                    }
                    if (genotypeDistances.get(phenotype).size() > nDist) {
                        break;
                    }
                }
            }
            List<Map.Entry<Node<String>, Sequence>> entries = new ArrayList<>(multimap.entries());
            Collections.shuffle(entries, random);
            for (Map.Entry<Node<String>, Sequence> entry1 : entries) {
                for (Map.Entry<Node<String>, Sequence> entry2 : entries) {
                    double gDistance = genotypeDistance.d(entry1.getValue(), entry2.getValue());
                    double pDistance = phenotypeDistance.d(entry1.getKey(), entry2.getKey());
                    allDistances.add(new Pair<>(gDistance, pDistance));
                    if (!Node.EMPTY_TREE.equals(entry1.getKey()) && !Node.EMPTY_TREE.equals(entry2.getKey())) {
                        allValidDistances.add(new Pair<>(gDistance, pDistance));
                    }
                    if (allDistances.size() > nDist) {
                        break;
                    }
                }
                if (allDistances.size() > nDist) {
                    break;
                }
            }
            // compute properties
            double invalidity = (double) multimap.get(Node.EMPTY_TREE).size() / (double) genotypes.size();
            double redundancy = 1 - (double) multimap.keySet().size() / (double) genotypes.size();
            double validRedundancy = redundancy;
            if (multimap.keySet().contains(Node.EMPTY_TREE)) {
                validRedundancy = 1 - ((double) multimap.keySet().size() - 1d)
                        / (double) (genotypes.size() - multimap.get(Node.EMPTY_TREE).size());
            }
            double locality = Utils.pearsonCorrelation(allDistances);
            double validLocality = Utils.pearsonCorrelation(allValidDistances);
            double[] sizes = new double[multimap.keySet().size()];
            double[] meanGenotypeDistances = new double[multimap.keySet().size()];
            int invalidIndex = -1;
            int c = 0;
            for (Node<String> phenotype : multimap.keySet()) {
                if (Node.EMPTY_TREE.equals(phenotype)) {
                    invalidIndex = c;
                }
                sizes[c] = multimap.get(phenotype).size();
                double[] distances = new double[genotypeDistances.get(phenotype).size()];
                int k = 0;
                for (Double distance : genotypeDistances.get(phenotype)) {
                    distances[k] = distance;
                    k = k + 1;
                }
                meanGenotypeDistances[c] = StatUtils.mean(distances);
                c = c + 1;
            }
            double nonUniformity = Math.sqrt(StatUtils.variance(sizes)) / StatUtils.mean(sizes);
            double nonSynonymousity = StatUtils.mean(meanGenotypeDistances)
                    / StatUtils.mean(firsts(allDistances));
            double validNonUniformity = nonUniformity;
            double validNonSynonymousity = nonSynonymousity;
            if (invalidIndex != -1) {
                double[] validSizes = new double[multimap.keySet().size() - 1];
                double[] validMeanGenotypeDistances = new double[multimap.keySet().size() - 1];
                if (invalidIndex > 0) {
                    System.arraycopy(sizes, 0, validSizes, 0, invalidIndex);
                    System.arraycopy(meanGenotypeDistances, 0, validMeanGenotypeDistances, 0, invalidIndex);
                }
                System.arraycopy(sizes, invalidIndex + 1, validSizes, invalidIndex,
                        sizes.length - invalidIndex - 1);
                System.arraycopy(meanGenotypeDistances, invalidIndex + 1, validMeanGenotypeDistances,
                        invalidIndex, meanGenotypeDistances.length - invalidIndex - 1);
                validNonUniformity = Math.sqrt(StatUtils.variance(validSizes)) / StatUtils.mean(validSizes);
                validNonSynonymousity = StatUtils.mean(validMeanGenotypeDistances)
                        / StatUtils.mean(firsts(allValidDistances));
            }
            // print properties
            filePrintStream.printf("%s;%s;%d;%d;invalidity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, invalidity);
            filePrintStream.printf("%s;%s;%d;%d;redundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, redundancy);
            filePrintStream.printf("%s;%s;%d;%d;validRedundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validRedundancy);
            filePrintStream.printf("%s;%s;%d;%d;locality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, locality);
            filePrintStream.printf("%s;%s;%d;%d;validLocality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validLocality);
            filePrintStream.printf("%s;%s;%d;%d;nonUniformity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;validNonUniformity;%f %n", problemName,
                    mapperName.split("-")[0], genotypeSize, mapperMainParam, validNonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;nonSynonymousity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonSynonymousity);
            filePrintStream.printf("%s;%s;%d;%d;validNonSynonymousity;%f %n", problemName,
                    mapperName.split("-")[0], genotypeSize, mapperMainParam, validNonSynonymousity);
        }
    }
    if (filePrintStream != null) {
        filePrintStream.close();
    }
}
From source file:com.metabroadcast.common.intl.Countries.java
private static Multimap<Country, String> aliases() {
    Multimap<Country, String> map = HashMultimap.create();
    map.put(GB, "UK");
    return map;
}